diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bfb53a12e921f1a9f3d93f4acc198706efba8fea --- /dev/null +++ b/README.md @@ -0,0 +1,77 @@ +--- +tags: +- generated_from_trainer +model-index: +- name: lora-out + results: [] +--- + + + +[Built with Axolotl](https://github.com/OpenAccess-AI-Collective/axolotl) +# lora-out + +This model is a LoRA adapter fine-tuned from ./mistralai_Mistral-7B-v0.1 on an unspecified dataset. +It achieves the following results on the evaluation set: +- Loss: 1.6736 + +## Model description + +More information needed + +## Intended uses & limitations + +More information needed + +## Training and evaluation data + +More information needed + +## Training procedure + +### Training hyperparameters + +The following hyperparameters were used during training: +- learning_rate: 0.0002 +- train_batch_size: 4 +- eval_batch_size: 4 +- seed: 42 +- gradient_accumulation_steps: 4 +- total_train_batch_size: 16 +- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 +- lr_scheduler_type: cosine +- lr_scheduler_warmup_steps: 10 +- num_epochs: 3 + +### Training results + +| Training Loss | Epoch | Step | Validation Loss | +|:-------------:|:-----:|:----:|:---------------:| +| 1.6421 | 0.16 | 50 | 1.6217 | +| 1.6288 | 0.31 | 100 | 1.6144 | +| 1.5725 | 0.47 | 150 | 1.6102 | +| 1.5582 | 0.62 | 200 | 1.6065 | +| 1.6055 | 0.78 | 250 | 1.6051 | +| 1.5733 | 0.93 | 300 | 1.6023 | +| 1.4885 | 1.09 | 350 | 1.6130 | +| 1.484 | 1.24 | 400 | 1.6169 | +| 1.4354 | 1.4 | 450 | 1.6194 | +| 1.4427 | 1.56 | 500 | 1.6187 | +| 1.4687 | 1.71 | 550 | 1.6178 | +| 1.461 | 1.87 | 600 | 1.6174 | +| 1.327 | 2.02 | 650 | 1.6341 | +| 1.3015 | 2.18 | 700 | 1.6665 | +| 1.3328 | 2.33 | 750 | 1.6714 | +| 1.3453 | 2.49 | 800 | 1.6718 | +| 1.3458 | 2.64 | 850 | 1.6725 | +| 1.3016 | 2.8 | 900 | 1.6737 | +| 1.3018 | 2.95 | 950 | 1.6736 | + + +### Framework versions + +- Transformers 4.34.0.dev0 +- Pytorch 2.0.1+cu118 +- Datasets 2.14.5 +- Tokenizers 0.14.0 diff --git a/adapter_config.json b/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/adapter_model.bin b/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..ed86b263f73d0a96f1106b7db94d62f5067e71b8 --- /dev/null +++ b/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:219facdff16b696735f1a84fdd92a0cbac9e197db9eef71c8ce3344d57718790 +size 84046925 diff --git a/added_tokens.json b/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..cbce74e5c64b97114098962fa58454a57d7fb532 --- /dev/null +++ b/added_tokens.json @@ -0,0 +1,5 @@ +{ + "</s>": 2, + "<s>": 1, + "<unk>": 0 +} diff --git a/checkpoint-100/README.md b/checkpoint-100/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-100/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-100/adapter_config.json b/checkpoint-100/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-100/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-100/adapter_model.bin b/checkpoint-100/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..1c76649294b090326771f7766f9edcd71926cf83 --- /dev/null +++ b/checkpoint-100/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15b988ab295a843da4e545e17f172c76e0f1bfa3a9dd9edf198aef5f67b77510 +size 84046925 diff --git a/checkpoint-100/optimizer.pt b/checkpoint-100/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..f2095acedfb1f699669821886a17f296fd5929ad --- /dev/null +++ b/checkpoint-100/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d7a2c502708a4001a524b14e53231dd6709ac7558468e509fa4048cd3f6e77c +size 168039109 diff --git a/checkpoint-100/rng_state.pth b/checkpoint-100/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..cbcea7228f1bf31e0baf7e6d24b11aed76246b0b --- /dev/null +++ b/checkpoint-100/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b33e2e1c1d173dea2a01ae0984bce42a42fb5bc88b8e940e3d611fa0910ef6b +size 14575 diff --git a/checkpoint-100/scheduler.pt b/checkpoint-100/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..27d34c33a98f77b7b82133cd3bdc97c742de51cc --- /dev/null +++ b/checkpoint-100/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69c403caea7cd1ca879cd128b86652676d56ca84332ed1fd33c5182cf98e7a10 +size 627 diff --git a/checkpoint-100/trainer_state.json b/checkpoint-100/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..cd714bc68eb535ea3d4aee64db8db535eb90c118 --- /dev/null +++ b/checkpoint-100/trainer_state.json @@ -0,0 +1,635 @@ +{ + "best_metric": 1.6143836975097656, + "best_model_checkpoint": "./lora-out/checkpoint-100", + "epoch": 0.3110419906687403, + "eval_steps": 50, + "global_step": 100, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": 
true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + 
"epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { 
+ "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.804271657517056e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-100/training_args.bin b/checkpoint-100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-150/README.md b/checkpoint-150/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-150/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-150/adapter_config.json b/checkpoint-150/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-150/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-150/adapter_model.bin b/checkpoint-150/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..a5cd8dc0561e4031eba4bff993afefbe14c67e06 --- /dev/null +++ b/checkpoint-150/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a8dc95d466eddcfa8f6d8c0d5ccbea2ab6e546948aa9f5852746421f82e77c0 +size 84046925 diff --git a/checkpoint-150/optimizer.pt b/checkpoint-150/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..2ceb4129e9616f7f2095131e66f739e0e171c046 --- /dev/null +++ b/checkpoint-150/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5972252f94afdaa16250d0a4a3efb52fef281f559cc7aab7a784266e5c32b047 +size 168039109 diff --git a/checkpoint-150/rng_state.pth b/checkpoint-150/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..2f1377261e6371947b4f2d01a44a1f544fd5107d --- /dev/null +++ b/checkpoint-150/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90a6fa23e9d23da035b3cd7f01ac4e5c74cc8369e9fff9529d9aa0c1fed279ac +size 14575 diff --git a/checkpoint-150/scheduler.pt b/checkpoint-150/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..265c695f1b7f952524987b5af2cf187af3df873d --- /dev/null +++ b/checkpoint-150/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fce3e98547f3323418da3efa641653d361277560a755469afb6b5a3bc118996a +size 627 diff --git a/checkpoint-150/trainer_state.json b/checkpoint-150/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..0dd160a5439674ab4bc50a6ab3eee957fcb5e27e --- /dev/null +++ b/checkpoint-150/trainer_state.json @@ -0,0 +1,943 @@ +{ + "best_metric": 1.6101970672607422, + "best_model_checkpoint": 
"./lora-out/checkpoint-150", + "epoch": 0.4665629860031104, + "eval_steps": 50, + "global_step": 150, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 
0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + 
"epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + 
"epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 
0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 4.206407486275584e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-150/training_args.bin b/checkpoint-150/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-150/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-200/README.md b/checkpoint-200/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-200/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-200/adapter_config.json b/checkpoint-200/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-200/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-200/adapter_model.bin 
b/checkpoint-200/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..3712b7a6681f76ef6806f0cecb66c8bce887019f --- /dev/null +++ b/checkpoint-200/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a22955168eaee3fb137ff861f71f50175e92e749a4651883a21a62d89a683d29 +size 84046925 diff --git a/checkpoint-200/optimizer.pt b/checkpoint-200/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..c341e4d4f6203afb9994d86db8f39ebd56d90ecb --- /dev/null +++ b/checkpoint-200/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60fecaee34f97b4a56b90bda6df1c46f987fd91ba3dac957aae197ea931cd966 +size 168039109 diff --git a/checkpoint-200/rng_state.pth b/checkpoint-200/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..a535eb7b802e7c3de6305325b69bf1357723cf65 --- /dev/null +++ b/checkpoint-200/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af60ac7a22907ef419c05b0c85df6ad791a63738c92e1690745698d449673a5f +size 14575 diff --git a/checkpoint-200/scheduler.pt b/checkpoint-200/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..9f6d89a577455ccb5192a38a43e9c032a7af8311 --- /dev/null +++ b/checkpoint-200/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:591a15d1a18d8da2c87f78ca1e99fd842dc60d1eb3d13c069154f369561b36af +size 627 diff --git a/checkpoint-200/trainer_state.json b/checkpoint-200/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..b510bb84bef2eb644fe7eb4f9beae3ee033e42d2 --- /dev/null +++ b/checkpoint-200/trainer_state.json @@ -0,0 +1,1251 @@ +{ + "best_metric": 1.6065257787704468, + "best_model_checkpoint": "./lora-out/checkpoint-200", + "epoch": 0.6220839813374806, + "eval_steps": 50, + "global_step": 200, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + 
"step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + 
"step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 
0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + 
"learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + 
"step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 
0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 5.608543315034112e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-200/training_args.bin b/checkpoint-200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-250/README.md b/checkpoint-250/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-250/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-250/adapter_config.json b/checkpoint-250/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-250/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-250/adapter_model.bin b/checkpoint-250/adapter_model.bin new file mode 100644 index 
0000000000000000000000000000000000000000..86bb69077607edc289ecad82f8357ebc8664afc2 --- /dev/null +++ b/checkpoint-250/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8f2be5e57601e9471651b0e8821845228d7a8b73ebbaea4df07cd4de3b3ac0d +size 84046925 diff --git a/checkpoint-250/optimizer.pt b/checkpoint-250/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..16e93d89c575c09f132c0ae7e6354480a44d26c4 --- /dev/null +++ b/checkpoint-250/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce35862ebc0e21a86a1e6281a6822907917216d33e0560244de74624d72d7204 +size 168039109 diff --git a/checkpoint-250/rng_state.pth b/checkpoint-250/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..d0252997a44b8cf2b8797dccf28f22b0f0a3e280 --- /dev/null +++ b/checkpoint-250/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6223a05bbaac028fbfc88016641eb39c4bc95a13c0658c4f8611997cc29c5e41 +size 14575 diff --git a/checkpoint-250/scheduler.pt b/checkpoint-250/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..9f896eb3c1f6d397d7ae183cdc51fde5bb4ac10f --- /dev/null +++ b/checkpoint-250/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a922f36e3287f78e28f040c3193d729dfc63546fcf6eb66508241f493732d059 +size 627 diff --git a/checkpoint-250/trainer_state.json b/checkpoint-250/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..ad926495c96d38c12686dc6dc97f8e97c53bea64 --- /dev/null +++ b/checkpoint-250/trainer_state.json @@ -0,0 +1,1559 @@ +{ + "best_metric": 1.60513174533844, + "best_model_checkpoint": "./lora-out/checkpoint-250", + "epoch": 0.7776049766718507, + "eval_steps": 50, + "global_step": 250, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 
0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 
0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + 
"epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 
121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 
0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 
1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + 
"learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 7.01067914379264e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-250/training_args.bin b/checkpoint-250/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-250/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-300/README.md b/checkpoint-300/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-300/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-300/adapter_config.json b/checkpoint-300/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-300/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-300/adapter_model.bin b/checkpoint-300/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..ed86b263f73d0a96f1106b7db94d62f5067e71b8 --- /dev/null +++ b/checkpoint-300/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:219facdff16b696735f1a84fdd92a0cbac9e197db9eef71c8ce3344d57718790 +size 84046925 diff --git a/checkpoint-300/optimizer.pt b/checkpoint-300/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..5131c7ff8bce5450c6e72bd20f94affeef7f3605 --- /dev/null +++ b/checkpoint-300/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a343ddfe6887de6fbae314359ede72e86a4b1ca90c8dca7f14f5c67e99f3f746 +size 168039557 diff --git a/checkpoint-300/rng_state.pth b/checkpoint-300/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..6c984446541076d5d23143cf6874f94e26449305 --- /dev/null +++ b/checkpoint-300/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65057c4c38794cbabddcd3f77e0f9ae9b434f793b5e38c88f0b54b3a59c2a015 +size 14575 diff --git a/checkpoint-300/scheduler.pt b/checkpoint-300/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..9b73dee8481529bed2651c1a519d730710286c3d --- /dev/null +++ b/checkpoint-300/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d16d35aca2529b2ee06dff7108489f02d74e6fa0dc4c995236c9afa17fe97a23 +size 627 diff --git a/checkpoint-300/trainer_state.json b/checkpoint-300/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..b38795dee9b90606209ddfb19af9b39f2219734f --- /dev/null +++ b/checkpoint-300/trainer_state.json @@ -0,0 +1,1867 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 0.9331259720062208, + "eval_steps": 50, + "global_step": 300, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": 
true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + 
"epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { 
+ "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + 
"epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 
0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { 
+ "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + 
"loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { 
+ "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 
1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 8.412814972551168e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-300/training_args.bin b/checkpoint-300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-350/README.md b/checkpoint-350/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-350/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 
+- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-350/adapter_config.json b/checkpoint-350/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-350/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-350/adapter_model.bin b/checkpoint-350/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..3aa18348a92bffdbd24087234ef6ed9fbbb288df --- /dev/null +++ b/checkpoint-350/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b2acf0965b728b9f39ff875af1a0bb542f43cde79c4a44aec31d97b0a6d6b82 +size 84046925 diff --git a/checkpoint-350/optimizer.pt b/checkpoint-350/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..0af768f7d1521ecba7cb06a6be2d398335e5b2d4 --- /dev/null +++ b/checkpoint-350/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11cb718fc6f4900ddc9d4ea78c1d177248d5b82b9a7a5e9ba5cab022b06f42c3 +size 168039557 diff --git a/checkpoint-350/rng_state.pth b/checkpoint-350/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..89ecbbf70d24c6b6083ded1f63c602ff6f97fbfa --- /dev/null +++ b/checkpoint-350/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:baa9a901b79ce6d4ba427b92cd20bcdf077fd9a1e8d53d67f2028575762241a6 +size 14575 diff --git a/checkpoint-350/scheduler.pt b/checkpoint-350/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..d6c934078b66ba54b958aff7f251b4b47b08a861 --- /dev/null +++ b/checkpoint-350/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daca21bf7aaa413d1759b6b33dab777d4a7f3e9ba04816c3ce7a0f4e706b5cdb +size 627 diff --git a/checkpoint-350/trainer_state.json b/checkpoint-350/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..c106915d8ea725f5f4cd30c7a79d065e3b6f4bba --- /dev/null +++ b/checkpoint-350/trainer_state.json @@ -0,0 +1,2175 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.088646967340591, + "eval_steps": 50, + "global_step": 350, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + 
"learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 
41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + 
"step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, 
+ "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 
0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, 
+ "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 
0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 
1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 
0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + 
"epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 
0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 9.814950801309696e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-350/training_args.bin b/checkpoint-350/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-350/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-400/README.md b/checkpoint-400/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-400/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-400/adapter_config.json b/checkpoint-400/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-400/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-400/adapter_model.bin b/checkpoint-400/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..018ef8ed4814437b254a1662c18e8418a7cde1ee --- /dev/null +++ b/checkpoint-400/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6d2001c0f85408898145cf14531acb2e0934bf7c787e5d1e9d0859232e79735 +size 84046925 diff --git a/checkpoint-400/optimizer.pt b/checkpoint-400/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..1e262bcbda41147372c208885bd05af70f21e119 --- /dev/null +++ b/checkpoint-400/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b1580754ee71c3467c0bb3f9b58be9e00264b412a544fa0b4c02a21bd1092c3 +size 168039557 diff --git a/checkpoint-400/rng_state.pth b/checkpoint-400/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..769792d2241c5b1cdb74c54e8055c807e2d56147 --- /dev/null +++ b/checkpoint-400/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a3af4a612ec824daf7110c775bbaf41e6098798a49ec63ec0fbce0fcecbe67a +size 14575 diff --git 
a/checkpoint-400/scheduler.pt b/checkpoint-400/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..a7b6e1d935015a1af08154967776f9c2a7027e48 --- /dev/null +++ b/checkpoint-400/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efd4865ba769f3cb77246d5f15144bdda2e16b6b5bbc8114593d92dea869b823 +size 627 diff --git a/checkpoint-400/trainer_state.json b/checkpoint-400/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..95e17183476254845ec74cdc2dbe86768b43f9c0 --- /dev/null +++ b/checkpoint-400/trainer_state.json @@ -0,0 +1,2483 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.244167962674961, + "eval_steps": 50, + "global_step": 400, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 
0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + 
"epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + 
"step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 
0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + 
{ + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 
1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + 
}, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + 
"loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + 
"learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 
1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 
0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.1217086630068224e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-400/training_args.bin b/checkpoint-400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ 
b/checkpoint-400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-450/README.md b/checkpoint-450/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-450/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-450/adapter_config.json b/checkpoint-450/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-450/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-450/adapter_model.bin b/checkpoint-450/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..c9fa41c85031af52f512bb0b28b608a349547d09 --- /dev/null +++ b/checkpoint-450/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b8f953cee8c40c4968692c6a36df7b00cf5b7ded95a0498bbaa60bd1e5a9720 +size 84046925 diff --git a/checkpoint-450/optimizer.pt b/checkpoint-450/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..5cd259908743c170751647d84592ba96ad46476c --- /dev/null +++ b/checkpoint-450/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:916e42cd880a4c91175fec5365127720f47864e7c927b6f230bfb05f3a0c6215 +size 168039557 diff --git a/checkpoint-450/rng_state.pth b/checkpoint-450/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..f7593ee4f96f38ce6280ba2a7677b94fd5d462c0 --- /dev/null +++ b/checkpoint-450/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81b38e4b7dada6c9e755f4d6dfa36713a47dcc10b94ab89ee942ac8cd216963e +size 14575 diff --git a/checkpoint-450/scheduler.pt b/checkpoint-450/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..f51334597891376b259f0bcca2dadf98309115e4 --- /dev/null +++ b/checkpoint-450/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:406e58e7e55dfa899f6039a730afbe122fb46b2d29e771a9b1f86f0f6b428330 +size 627 diff --git a/checkpoint-450/trainer_state.json b/checkpoint-450/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..d82e752900810136a79e504f7a9b0fb9f1934e9a --- /dev/null +++ b/checkpoint-450/trainer_state.json @@ -0,0 
+1,2791 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.3996889580093312, + "eval_steps": 50, + "global_step": 450, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + 
"learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, 
+ "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + 
"learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, 
+ "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + 
"learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + 
"step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, 
+ "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 
+ }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 
0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + 
"step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 
0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + 
{ + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + 
"learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.2619222458826752e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-450/training_args.bin b/checkpoint-450/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-450/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-50/README.md b/checkpoint-50/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-50/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-50/adapter_config.json b/checkpoint-50/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-50/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-50/adapter_model.bin b/checkpoint-50/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..cfcbaa7a4ad05a57660e81876b20fa1a4582edfe --- /dev/null +++ b/checkpoint-50/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:280052ac4f13ae2c9974fa0bd0ca966d6f63e7af23f6f88d5e54769af71e8786 +size 84046925 diff --git a/checkpoint-50/optimizer.pt b/checkpoint-50/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..dba2899b6339074ef36731a1252f6e39822d279b --- /dev/null +++ b/checkpoint-50/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81d65170b209adc3787e9e90a256112628af989187bd5d4cfcd76720eb6e7f3f 
+size 168039109 diff --git a/checkpoint-50/rng_state.pth b/checkpoint-50/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..5f49b1c05b66d856d9a4a17f117392a6322e952e --- /dev/null +++ b/checkpoint-50/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71357ed48aabf4558feade8d86a234c1c9b2c5afff31a24e51382766acf2f1ad +size 14575 diff --git a/checkpoint-50/scheduler.pt b/checkpoint-50/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..5bd74626f41f869947af0f291985b96d1fd3e7c8 --- /dev/null +++ b/checkpoint-50/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ca8fc99d85cfbbc49ade6b66233733acbf825ad5da2c01d8c26def5167cdfb13 +size 627 diff --git a/checkpoint-50/trainer_state.json b/checkpoint-50/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..c84cb19bf9cbd1ab77365206b12fa37bdeffc902 --- /dev/null +++ b/checkpoint-50/trainer_state.json @@ -0,0 +1,327 @@ +{ + "best_metric": 1.621693730354309, + "best_model_checkpoint": "./lora-out/checkpoint-50", + "epoch": 0.15552099533437014, + "eval_steps": 50, + "global_step": 50, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 
0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.402135828758528e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-50/training_args.bin b/checkpoint-50/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-50/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-500/README.md b/checkpoint-500/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-500/README.md @@ -0,0 +1,21 @@ +--- 
+library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-500/adapter_config.json b/checkpoint-500/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-500/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-500/adapter_model.bin b/checkpoint-500/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..2342b7fa410ae53160bc1fc7a83798f27c8a1cfe --- /dev/null +++ b/checkpoint-500/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d13723e5ee2ac64985f9da650587a4a846bb3db74837bee2cd79089518d414f2 +size 84046925 diff --git a/checkpoint-500/optimizer.pt b/checkpoint-500/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..1b960dfcc41b3efed9ea2d0c4310e9fd1e8e00c8 --- /dev/null +++ b/checkpoint-500/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05526fca5223e426f89a9f2a78c0a6a119b72a5df8ce70a40bf0ebdc6ef3d2e0 +size 168039557 diff --git a/checkpoint-500/rng_state.pth b/checkpoint-500/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..8710e31cead55974fa60da4aecd72efddcbdd325 --- /dev/null +++ b/checkpoint-500/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d115d1202a258d295338c79ff92a4d1c5a7a6b87fa418cb0e8f31297ea6dd32f +size 14575 diff --git a/checkpoint-500/scheduler.pt b/checkpoint-500/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..3a6a3246171955b3c0e5f57638df96ce4f7c8a1a --- /dev/null +++ b/checkpoint-500/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05b5207892a46b4cea324d6a3359b95dbe81518d01ab89546d80784242bff1a2 +size 627 diff --git a/checkpoint-500/trainer_state.json b/checkpoint-500/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..e24538d4b445aeefa84f19559c7d3a2a7c22dc6a --- /dev/null +++ b/checkpoint-500/trainer_state.json @@ -0,0 +1,3099 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.5552099533437014, + "eval_steps": 50, + "global_step": 500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 
4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 
0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + 
"learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + 
"learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 
0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { 
+ "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + 
"loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { 
+ "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 
1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 
0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + 
"step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 
0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + 
{ + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 
0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + 
}, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.402135828758528e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-500/training_args.bin b/checkpoint-500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-550/README.md b/checkpoint-550/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-550/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-550/adapter_config.json b/checkpoint-550/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-550/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + 
"bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-550/adapter_model.bin b/checkpoint-550/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..87f1aaf535e8941e36594e283cd04cac5dc4b0e2 --- /dev/null +++ b/checkpoint-550/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:786f550eeff64d5827afa6620a0dca298d52a029329a06e9e6e7f5c2e68993eb +size 84046925 diff --git a/checkpoint-550/optimizer.pt b/checkpoint-550/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..cd5bed2a1e7fe447c006fce4fbcfecc38906d385 --- /dev/null +++ b/checkpoint-550/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c81451b560aa6c1bee584409eeccd4ba588ec368be6b8fd090022fe6438b5acf +size 168039557 diff --git a/checkpoint-550/rng_state.pth b/checkpoint-550/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..b3486e811a375387c567f8ef46459d697ae63b3d --- /dev/null +++ b/checkpoint-550/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7bde64cb55d28230e1ca9057db1b6eb173ea049c9ee764320f59e7bf679394e9 +size 14575 diff --git a/checkpoint-550/scheduler.pt b/checkpoint-550/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..b3fd7311776c23e7dfc83f68c1a3aaa10ce08f9f --- /dev/null +++ b/checkpoint-550/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5c86f1aa7061598da96d6bd4fc2522c7baab0b7a0f676b633a2be6642ab22d8 +size 627 diff --git a/checkpoint-550/trainer_state.json b/checkpoint-550/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..0c244b0511bf124115ab324eb69fd5dfa2bcca71 --- /dev/null +++ b/checkpoint-550/trainer_state.json @@ -0,0 +1,3407 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.7107309486780715, + "eval_steps": 50, + "global_step": 550, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 
0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + 
"epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { 
+ "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + 
}, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 
1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, 
+ { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 
0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + 
"epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 
0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + 
"epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, 
+ "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 
1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 
1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 
0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 
1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + 
"loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.5423494116343808e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-550/training_args.bin b/checkpoint-550/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-550/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-600/README.md b/checkpoint-600/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-600/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- 
quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-600/adapter_config.json b/checkpoint-600/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-600/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-600/adapter_model.bin b/checkpoint-600/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..de5931c6f2b020cf31a3030a5bcbd9dd62a58fdc --- /dev/null +++ b/checkpoint-600/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:696e7f09eb5ac2a777964c001d16511164714a3fcd0e11d68dbb949e7c7ea7ac +size 84046925 diff --git a/checkpoint-600/optimizer.pt b/checkpoint-600/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..3a7159acdc902fc05f9021d48ed53ff21f005ecd --- /dev/null +++ b/checkpoint-600/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:25a488aebb4aa9562f82e96cd380ec9b0dff78ee5493d0d9a208bb2d3ce80e9d +size 168039557 diff --git a/checkpoint-600/rng_state.pth b/checkpoint-600/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..99e5acd89794b0e529d94aa322aa381f05e3fb3d --- /dev/null +++ b/checkpoint-600/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26c9d5bf309d1d32c5bbec71f8ccf4ee107fb015ed8d73f046811bdceefe8842 +size 14575 diff --git a/checkpoint-600/scheduler.pt b/checkpoint-600/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..1696189912b14acb9aaf2fd28b25578769cad01d --- /dev/null +++ b/checkpoint-600/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b02c1d90a866c0f6b89be9b5ec8486412ff944678febddcce34bdcf69e572ad3 +size 627 diff --git a/checkpoint-600/trainer_state.json b/checkpoint-600/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..9b293d48d6c5caa0b6ee62a147cd7f099c4ffb8a --- /dev/null +++ b/checkpoint-600/trainer_state.json @@ -0,0 +1,3715 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 1.8662519440124417, + "eval_steps": 50, + "global_step": 600, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + 
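
The checkpoint-600 `README.md` and `adapter_config.json` above record a 4-bit bitsandbytes setup (nf4 quantization, double quantization, bfloat16 compute) and an r=8 / lora_alpha=16 adapter over the Mistral-7B attention and MLP projection layers. As a minimal sketch only, assuming the local paths written in those files and an inference-time `device_map="auto"` placement (neither is prescribed by the repository), such a checkpoint could be loaded roughly like this with `transformers` and `peft`:

```python
# Hedged example: mirrors the quantization and LoRA settings recorded in the
# checkpoint-600 files; paths and device placement are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
from peft import PeftModel

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # load_in_4bit: True
    bnb_4bit_quant_type="nf4",              # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=True,         # bnb_4bit_use_double_quant: True
    bnb_4bit_compute_dtype=torch.bfloat16,  # bnb_4bit_compute_dtype: bfloat16
)

# Base model path taken from base_model_name_or_path in adapter_config.json.
base = AutoModelForCausalLM.from_pretrained(
    "./mistralai_Mistral-7B-v0.1",
    quantization_config=bnb_config,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("./mistralai_Mistral-7B-v0.1")

# Attach the r=8, alpha=16 LoRA adapter saved at this checkpoint.
model = PeftModel.from_pretrained(base, "./lora-out/checkpoint-600")
```
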
"epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 
40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + 
"step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, 
+ "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 
0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, 
+ "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 
0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 
1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 
0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + 
"epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 
0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + 
{ + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + 
"loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + 
"eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 
0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + 
"learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + 
"epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 
1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.6825629945102336e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-600/training_args.bin b/checkpoint-600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-650/README.md b/checkpoint-650/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-650/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-650/adapter_config.json b/checkpoint-650/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-650/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff 
--git a/checkpoint-650/adapter_model.bin b/checkpoint-650/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..c059ceb57c831662b5a25b57abe3fd69777b1c06 --- /dev/null +++ b/checkpoint-650/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c53989483522f2ba85709b02fd39606c2fe2fa450f87f5c953e5d51b8c3932af +size 84046925 diff --git a/checkpoint-650/optimizer.pt b/checkpoint-650/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..58867da6b3c2c662fff2ca51162417810fdb9922 --- /dev/null +++ b/checkpoint-650/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b1c96c4339a959e139a41ecdd0f98e984f2ed3b4c70a80c39b9fd5fca4489714 +size 168039557 diff --git a/checkpoint-650/rng_state.pth b/checkpoint-650/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..8ac442de7ef10f78ca573555d99908a8c7599a33 --- /dev/null +++ b/checkpoint-650/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dfadb7c1233e21dd1107af413819f6dd56354fe5e4b3b7bcc5e7b52a174caf4c +size 14575 diff --git a/checkpoint-650/scheduler.pt b/checkpoint-650/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..8dc5bd394e88a6657aa5528ae62aa9536d43e32b --- /dev/null +++ b/checkpoint-650/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e380d17669bbebfded5d650312015406712d4d8f49d146771141aba4195a0c7 +size 627 diff --git a/checkpoint-650/trainer_state.json b/checkpoint-650/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..6b16f0c499688ea033a0a6910838e91bf26b73b5 --- /dev/null +++ b/checkpoint-650/trainer_state.json @@ -0,0 +1,4023 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.021772939346812, + "eval_steps": 50, + "global_step": 650, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 
0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 
0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + 
"epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 
120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 
0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 
1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + 
"learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 
256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 
0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 
1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + 
"step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 
1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 
1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + 
"learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, 
+ "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 
8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + 
"learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 
600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 
1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.8227765773860864e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-650/training_args.bin b/checkpoint-650/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-650/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-700/README.md b/checkpoint-700/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-700/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-700/adapter_config.json b/checkpoint-700/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-700/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + 
"init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-700/adapter_model.bin b/checkpoint-700/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..6ae09a0a32be4bf9230c724107a7acd9964cc51b --- /dev/null +++ b/checkpoint-700/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dca4a6ddb72c7f7748f914a919fe8f9544022dd498601c3ced4702e5ec3646f8 +size 84046925 diff --git a/checkpoint-700/optimizer.pt b/checkpoint-700/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..eeea3e09ce8b3c42cd93270627645388c2b2f99b --- /dev/null +++ b/checkpoint-700/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab1342d49e45fa5c6ec6e6be47caa1b11b927d108ea2268e95909763e56aa6b8 +size 168039557 diff --git a/checkpoint-700/rng_state.pth b/checkpoint-700/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..0e1ad98ea9ea0a9254cdc9d9fbff349209225a5d --- /dev/null +++ b/checkpoint-700/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e0ac5b235a3a915cc9f672bf529ac82c9fd081bc098d2aa56c90676eb56ce1f +size 14575 diff --git a/checkpoint-700/scheduler.pt b/checkpoint-700/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..b62c47c9b74bad28eb43feaa8d04d59773da383b --- /dev/null +++ b/checkpoint-700/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0696378a5370f2e210c2ed0fe95d008e273ac344f8696773d7de139157957b12 +size 627 diff --git a/checkpoint-700/trainer_state.json b/checkpoint-700/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..ae05e58a8595583bd6bd65b13f93b9aad1fa162f --- /dev/null +++ b/checkpoint-700/trainer_state.json @@ -0,0 +1,4331 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.177293934681182, + "eval_steps": 50, + "global_step": 700, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + 
"learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 
+ }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 
82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + 
"step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + 
"eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + 
"step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 
0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + 
"epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 
0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + 
"epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, 
+ "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 
1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 
1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 
0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 
1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + 
"loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 
7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + 
"epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 
630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 
1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, 
+ "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 1.9629901602619392e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-700/training_args.bin b/checkpoint-700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-750/README.md b/checkpoint-750/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-750/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-750/adapter_config.json b/checkpoint-750/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-750/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-750/adapter_model.bin b/checkpoint-750/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..05e1c7365d37fdd9864d4f802c2eb14551031476 --- /dev/null +++ b/checkpoint-750/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5f456c577f18c92c04a0f21fe2e7f24ab838b03b8052f80399ce8b7f783e6fc +size 84046925 diff --git a/checkpoint-750/optimizer.pt b/checkpoint-750/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..c98deabc19b64b4987d87d6cafc39cc7432a5573 --- /dev/null +++ b/checkpoint-750/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a7ece82264866f5d750d8ea42790f37306afcca85a923d6bc1a61a70fc3263a7 +size 168039557 diff --git a/checkpoint-750/rng_state.pth b/checkpoint-750/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..507bb0e79e29958a74e9ce51944b5c55af3c69c1 --- /dev/null +++ b/checkpoint-750/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77a0b3f951971fc4a8d2332ab96a1e91133d0ec7ed5576844667f12237498b0a 
+size 14575 diff --git a/checkpoint-750/scheduler.pt b/checkpoint-750/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..c60c978f1c9fa714d2918b172b918794bc8ebcd9 --- /dev/null +++ b/checkpoint-750/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:225849cee91323df73b8336a8808807e9d67ba74f7f184c83345b2dbfaaf41db +size 627 diff --git a/checkpoint-750/trainer_state.json b/checkpoint-750/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..a3e5600d62b4a5dc9c4aee3283880f18618c6d92 --- /dev/null +++ b/checkpoint-750/trainer_state.json @@ -0,0 +1,4639 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.332814930015552, + "eval_steps": 50, + "global_step": 750, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 
28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + 
"step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, 
+ "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + 
"learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + 
"step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + 
"eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + 
"step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 
0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + 
"epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 
0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + 
"epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + 
"loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, 
+ { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 
0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 
1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 
1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + 
"loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 
6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + 
{ + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 
1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, + "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + }, + { + "epoch": 2.18, + "learning_rate": 3.5036396896657455e-05, + "loss": 1.2943, + "step": 701 + }, + { + "epoch": 2.18, + "learning_rate": 3.478613320033042e-05, + "loss": 1.3333, + "step": 702 + }, + { + "epoch": 2.19, + "learning_rate": 3.453657818943142e-05, + "loss": 1.2983, + "step": 703 + }, + { + "epoch": 2.19, + "learning_rate": 3.4287734575898975e-05, + "loss": 1.3392, + "step": 704 + }, + { + "epoch": 2.19, + "learning_rate": 3.403960506394092e-05, + "loss": 1.2677, + "step": 705 + }, + { + "epoch": 2.2, + "learning_rate": 3.379219235000463e-05, + "loss": 1.3197, + "step": 706 + }, + { + "epoch": 2.2, + "learning_rate": 3.3545499122748216e-05, + "loss": 1.3343, + "step": 707 + }, + { + "epoch": 2.2, + "learning_rate": 3.329952806301092e-05, + "loss": 1.3591, + "step": 708 + }, + { + "epoch": 2.21, + "learning_rate": 3.305428184378413e-05, + "loss": 1.3272, + "step": 709 + }, + { + "epoch": 2.21, + "learning_rate": 3.280976313018239e-05, + "loss": 1.3499, + "step": 710 + }, + { + "epoch": 2.21, + "learning_rate": 3.256597457941429e-05, + "loss": 1.3371, + "step": 711 + }, + { + "epoch": 2.21, + "learning_rate": 3.232291884075373e-05, + "loss": 1.312, + "step": 712 + }, + { + "epoch": 2.22, + "learning_rate": 
3.208059855551101e-05, + "loss": 1.3502, + "step": 713 + }, + { + "epoch": 2.22, + "learning_rate": 3.18390163570042e-05, + "loss": 1.3094, + "step": 714 + }, + { + "epoch": 2.22, + "learning_rate": 3.1598174870530604e-05, + "loss": 1.3181, + "step": 715 + }, + { + "epoch": 2.23, + "learning_rate": 3.1358076713338014e-05, + "loss": 1.3011, + "step": 716 + }, + { + "epoch": 2.23, + "learning_rate": 3.1118724494596405e-05, + "loss": 1.3054, + "step": 717 + }, + { + "epoch": 2.23, + "learning_rate": 3.0880120815369694e-05, + "loss": 1.3215, + "step": 718 + }, + { + "epoch": 2.24, + "learning_rate": 3.0642268268587136e-05, + "loss": 1.2908, + "step": 719 + }, + { + "epoch": 2.24, + "learning_rate": 3.0405169439015557e-05, + "loss": 1.3334, + "step": 720 + }, + { + "epoch": 2.24, + "learning_rate": 3.0168826903230906e-05, + "loss": 1.3275, + "step": 721 + }, + { + "epoch": 2.25, + "learning_rate": 2.9933243229590568e-05, + "loss": 1.3329, + "step": 722 + }, + { + "epoch": 2.25, + "learning_rate": 2.969842097820519e-05, + "loss": 1.3185, + "step": 723 + }, + { + "epoch": 2.25, + "learning_rate": 2.9464362700910943e-05, + "loss": 1.3443, + "step": 724 + }, + { + "epoch": 2.26, + "learning_rate": 2.9231070941241988e-05, + "loss": 1.3034, + "step": 725 + }, + { + "epoch": 2.26, + "learning_rate": 2.899854823440241e-05, + "loss": 1.304, + "step": 726 + }, + { + "epoch": 2.26, + "learning_rate": 2.8766797107239164e-05, + "loss": 1.3136, + "step": 727 + }, + { + "epoch": 2.26, + "learning_rate": 2.8535820078214236e-05, + "loss": 1.2894, + "step": 728 + }, + { + "epoch": 2.27, + "learning_rate": 2.8305619657377413e-05, + "loss": 1.3303, + "step": 729 + }, + { + "epoch": 2.27, + "learning_rate": 2.8076198346339113e-05, + "loss": 1.3158, + "step": 730 + }, + { + "epoch": 2.27, + "learning_rate": 2.7847558638242964e-05, + "loss": 1.3071, + "step": 731 + }, + { + "epoch": 2.28, + "learning_rate": 2.7619703017738917e-05, + "loss": 1.2951, + "step": 732 + }, + { + "epoch": 2.28, + "learning_rate": 2.7392633960956127e-05, + "loss": 1.3138, + "step": 733 + }, + { + "epoch": 2.28, + "learning_rate": 2.7166353935476085e-05, + "loss": 1.3523, + "step": 734 + }, + { + "epoch": 2.29, + "learning_rate": 2.694086540030587e-05, + "loss": 1.2937, + "step": 735 + }, + { + "epoch": 2.29, + "learning_rate": 2.671617080585127e-05, + "loss": 1.3493, + "step": 736 + }, + { + "epoch": 2.29, + "learning_rate": 2.6492272593890267e-05, + "loss": 1.309, + "step": 737 + }, + { + "epoch": 2.3, + "learning_rate": 2.6269173197546527e-05, + "loss": 1.3188, + "step": 738 + }, + { + "epoch": 2.3, + "learning_rate": 2.6046875041262852e-05, + "loss": 1.3202, + "step": 739 + }, + { + "epoch": 2.3, + "learning_rate": 2.5825380540774914e-05, + "loss": 1.359, + "step": 740 + }, + { + "epoch": 2.3, + "learning_rate": 2.560469210308497e-05, + "loss": 1.2837, + "step": 741 + }, + { + "epoch": 2.31, + "learning_rate": 2.5384812126435697e-05, + "loss": 1.3195, + "step": 742 + }, + { + "epoch": 2.31, + "learning_rate": 2.5165743000284213e-05, + "loss": 1.2797, + "step": 743 + }, + { + "epoch": 2.31, + "learning_rate": 2.4947487105275945e-05, + "loss": 1.3656, + "step": 744 + }, + { + "epoch": 2.32, + "learning_rate": 2.4730046813218987e-05, + "loss": 1.3094, + "step": 745 + }, + { + "epoch": 2.32, + "learning_rate": 2.451342448705811e-05, + "loss": 1.3176, + "step": 746 + }, + { + "epoch": 2.32, + "learning_rate": 2.4297622480849104e-05, + "loss": 1.3318, + "step": 747 + }, + { + "epoch": 2.33, + "learning_rate": 2.408264313973343e-05, + "loss": 
1.3367, + "step": 748 + }, + { + "epoch": 2.33, + "learning_rate": 2.3868488799912414e-05, + "loss": 1.2717, + "step": 749 + }, + { + "epoch": 2.33, + "learning_rate": 2.3655161788622138e-05, + "loss": 1.3328, + "step": 750 + }, + { + "epoch": 2.33, + "eval_loss": 1.6713805198669434, + "eval_runtime": 233.7116, + "eval_samples_per_second": 16.358, + "eval_steps_per_second": 4.091, + "step": 750 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.103203743137792e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-750/training_args.bin b/checkpoint-750/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-750/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-800/README.md b/checkpoint-800/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-800/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-800/adapter_config.json b/checkpoint-800/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-800/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-800/adapter_model.bin b/checkpoint-800/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..8a0e1634cd4a21180a7fea4effb91d7c09d315d0 --- /dev/null +++ b/checkpoint-800/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:045437c25ad655c92b8619b737f0a85b989acbbaf9c80f857b5c834c795e5810 +size 84046925 diff --git a/checkpoint-800/optimizer.pt b/checkpoint-800/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..284eef2f3a01e4af3ee53ce67ace12ef753d8d46 --- /dev/null +++ b/checkpoint-800/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac9369575e495ace7b4088e53efce4e7d5806431cd446cd67ad9b82a84d671de +size 168039557 diff --git a/checkpoint-800/rng_state.pth b/checkpoint-800/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..2f7555fae3bb2e4cbfd9ee2566d1d90d7c193c0f --- /dev/null +++ 
b/checkpoint-800/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:451c6ec9fec02a2aa8941909c4313bcd10d98312558c2763069921ec9b94f2a0 +size 14575 diff --git a/checkpoint-800/scheduler.pt b/checkpoint-800/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..092b4c0eaa72df26c19b58ef11afe521e1a2f24d --- /dev/null +++ b/checkpoint-800/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99eb7c06ebf2640f5bfa4027038ce7f5f5c89edba6b490e4b1e9461c6e23122d +size 627 diff --git a/checkpoint-800/trainer_state.json b/checkpoint-800/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..2f8e67ae3e01adebebb3921260b2d5a7adcf84a8 --- /dev/null +++ b/checkpoint-800/trainer_state.json @@ -0,0 +1,4947 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.488335925349922, + "eval_steps": 50, + "global_step": 800, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + 
"learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + 
"learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + 
{ + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 
1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + 
"learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 
0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + 
{ + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + 
"loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + 
"eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 
0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + 
"epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + 
"loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, 
+ { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 
0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 
1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 
1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, 
+ "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 
6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + 
{ + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 
1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, + "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + }, + { + "epoch": 2.18, + "learning_rate": 3.5036396896657455e-05, + "loss": 1.2943, + "step": 701 + }, + { + "epoch": 2.18, + "learning_rate": 3.478613320033042e-05, + "loss": 1.3333, + "step": 702 + }, + { + "epoch": 2.19, + "learning_rate": 3.453657818943142e-05, + "loss": 1.2983, + "step": 703 + }, + { + "epoch": 2.19, + "learning_rate": 3.4287734575898975e-05, + "loss": 1.3392, + "step": 704 + }, + { + "epoch": 2.19, + "learning_rate": 3.403960506394092e-05, + "loss": 1.2677, + "step": 705 + }, + { + "epoch": 2.2, + "learning_rate": 3.379219235000463e-05, + "loss": 1.3197, + "step": 706 + }, + { + "epoch": 2.2, + "learning_rate": 3.3545499122748216e-05, + "loss": 1.3343, + "step": 707 + }, + { + "epoch": 2.2, + "learning_rate": 3.329952806301092e-05, + "loss": 1.3591, + "step": 708 + }, + { + "epoch": 2.21, + "learning_rate": 3.305428184378413e-05, + "loss": 1.3272, + "step": 709 + }, + { + "epoch": 2.21, + "learning_rate": 3.280976313018239e-05, + "loss": 1.3499, + "step": 710 + }, + { + "epoch": 2.21, + "learning_rate": 
3.256597457941429e-05, + "loss": 1.3371, + "step": 711 + }, + { + "epoch": 2.21, + "learning_rate": 3.232291884075373e-05, + "loss": 1.312, + "step": 712 + }, + { + "epoch": 2.22, + "learning_rate": 3.208059855551101e-05, + "loss": 1.3502, + "step": 713 + }, + { + "epoch": 2.22, + "learning_rate": 3.18390163570042e-05, + "loss": 1.3094, + "step": 714 + }, + { + "epoch": 2.22, + "learning_rate": 3.1598174870530604e-05, + "loss": 1.3181, + "step": 715 + }, + { + "epoch": 2.23, + "learning_rate": 3.1358076713338014e-05, + "loss": 1.3011, + "step": 716 + }, + { + "epoch": 2.23, + "learning_rate": 3.1118724494596405e-05, + "loss": 1.3054, + "step": 717 + }, + { + "epoch": 2.23, + "learning_rate": 3.0880120815369694e-05, + "loss": 1.3215, + "step": 718 + }, + { + "epoch": 2.24, + "learning_rate": 3.0642268268587136e-05, + "loss": 1.2908, + "step": 719 + }, + { + "epoch": 2.24, + "learning_rate": 3.0405169439015557e-05, + "loss": 1.3334, + "step": 720 + }, + { + "epoch": 2.24, + "learning_rate": 3.0168826903230906e-05, + "loss": 1.3275, + "step": 721 + }, + { + "epoch": 2.25, + "learning_rate": 2.9933243229590568e-05, + "loss": 1.3329, + "step": 722 + }, + { + "epoch": 2.25, + "learning_rate": 2.969842097820519e-05, + "loss": 1.3185, + "step": 723 + }, + { + "epoch": 2.25, + "learning_rate": 2.9464362700910943e-05, + "loss": 1.3443, + "step": 724 + }, + { + "epoch": 2.26, + "learning_rate": 2.9231070941241988e-05, + "loss": 1.3034, + "step": 725 + }, + { + "epoch": 2.26, + "learning_rate": 2.899854823440241e-05, + "loss": 1.304, + "step": 726 + }, + { + "epoch": 2.26, + "learning_rate": 2.8766797107239164e-05, + "loss": 1.3136, + "step": 727 + }, + { + "epoch": 2.26, + "learning_rate": 2.8535820078214236e-05, + "loss": 1.2894, + "step": 728 + }, + { + "epoch": 2.27, + "learning_rate": 2.8305619657377413e-05, + "loss": 1.3303, + "step": 729 + }, + { + "epoch": 2.27, + "learning_rate": 2.8076198346339113e-05, + "loss": 1.3158, + "step": 730 + }, + { + "epoch": 2.27, + "learning_rate": 2.7847558638242964e-05, + "loss": 1.3071, + "step": 731 + }, + { + "epoch": 2.28, + "learning_rate": 2.7619703017738917e-05, + "loss": 1.2951, + "step": 732 + }, + { + "epoch": 2.28, + "learning_rate": 2.7392633960956127e-05, + "loss": 1.3138, + "step": 733 + }, + { + "epoch": 2.28, + "learning_rate": 2.7166353935476085e-05, + "loss": 1.3523, + "step": 734 + }, + { + "epoch": 2.29, + "learning_rate": 2.694086540030587e-05, + "loss": 1.2937, + "step": 735 + }, + { + "epoch": 2.29, + "learning_rate": 2.671617080585127e-05, + "loss": 1.3493, + "step": 736 + }, + { + "epoch": 2.29, + "learning_rate": 2.6492272593890267e-05, + "loss": 1.309, + "step": 737 + }, + { + "epoch": 2.3, + "learning_rate": 2.6269173197546527e-05, + "loss": 1.3188, + "step": 738 + }, + { + "epoch": 2.3, + "learning_rate": 2.6046875041262852e-05, + "loss": 1.3202, + "step": 739 + }, + { + "epoch": 2.3, + "learning_rate": 2.5825380540774914e-05, + "loss": 1.359, + "step": 740 + }, + { + "epoch": 2.3, + "learning_rate": 2.560469210308497e-05, + "loss": 1.2837, + "step": 741 + }, + { + "epoch": 2.31, + "learning_rate": 2.5384812126435697e-05, + "loss": 1.3195, + "step": 742 + }, + { + "epoch": 2.31, + "learning_rate": 2.5165743000284213e-05, + "loss": 1.2797, + "step": 743 + }, + { + "epoch": 2.31, + "learning_rate": 2.4947487105275945e-05, + "loss": 1.3656, + "step": 744 + }, + { + "epoch": 2.32, + "learning_rate": 2.4730046813218987e-05, + "loss": 1.3094, + "step": 745 + }, + { + "epoch": 2.32, + "learning_rate": 2.451342448705811e-05, + "loss": 
1.3176, + "step": 746 + }, + { + "epoch": 2.32, + "learning_rate": 2.4297622480849104e-05, + "loss": 1.3318, + "step": 747 + }, + { + "epoch": 2.33, + "learning_rate": 2.408264313973343e-05, + "loss": 1.3367, + "step": 748 + }, + { + "epoch": 2.33, + "learning_rate": 2.3868488799912414e-05, + "loss": 1.2717, + "step": 749 + }, + { + "epoch": 2.33, + "learning_rate": 2.3655161788622138e-05, + "loss": 1.3328, + "step": 750 + }, + { + "epoch": 2.33, + "eval_loss": 1.6713805198669434, + "eval_runtime": 233.7116, + "eval_samples_per_second": 16.358, + "eval_steps_per_second": 4.091, + "step": 750 + }, + { + "epoch": 2.34, + "learning_rate": 2.344266442410794e-05, + "loss": 1.3325, + "step": 751 + }, + { + "epoch": 2.34, + "learning_rate": 2.323099901559931e-05, + "loss": 1.3277, + "step": 752 + }, + { + "epoch": 2.34, + "learning_rate": 2.302016786328488e-05, + "loss": 1.3567, + "step": 753 + }, + { + "epoch": 2.35, + "learning_rate": 2.281017325828716e-05, + "loss": 1.3087, + "step": 754 + }, + { + "epoch": 2.35, + "learning_rate": 2.260101748263803e-05, + "loss": 1.3173, + "step": 755 + }, + { + "epoch": 2.35, + "learning_rate": 2.2392702809253596e-05, + "loss": 1.3234, + "step": 756 + }, + { + "epoch": 2.35, + "learning_rate": 2.218523150190962e-05, + "loss": 1.3649, + "step": 757 + }, + { + "epoch": 2.36, + "learning_rate": 2.1978605815217025e-05, + "loss": 1.3433, + "step": 758 + }, + { + "epoch": 2.36, + "learning_rate": 2.177282799459719e-05, + "loss": 1.2992, + "step": 759 + }, + { + "epoch": 2.36, + "learning_rate": 2.1567900276257703e-05, + "loss": 1.3004, + "step": 760 + }, + { + "epoch": 2.37, + "learning_rate": 2.1363824887167993e-05, + "loss": 1.2894, + "step": 761 + }, + { + "epoch": 2.37, + "learning_rate": 2.1160604045035115e-05, + "loss": 1.3151, + "step": 762 + }, + { + "epoch": 2.37, + "learning_rate": 2.0958239958279756e-05, + "loss": 1.2694, + "step": 763 + }, + { + "epoch": 2.38, + "learning_rate": 2.0756734826012104e-05, + "loss": 1.2979, + "step": 764 + }, + { + "epoch": 2.38, + "learning_rate": 2.0556090838007957e-05, + "loss": 1.3187, + "step": 765 + }, + { + "epoch": 2.38, + "learning_rate": 2.0356310174685124e-05, + "loss": 1.3255, + "step": 766 + }, + { + "epoch": 2.39, + "learning_rate": 2.0157395007079428e-05, + "loss": 1.3623, + "step": 767 + }, + { + "epoch": 2.39, + "learning_rate": 1.9959347496821333e-05, + "loss": 1.317, + "step": 768 + }, + { + "epoch": 2.39, + "learning_rate": 1.9762169796112397e-05, + "loss": 1.3102, + "step": 769 + }, + { + "epoch": 2.4, + "learning_rate": 1.956586404770182e-05, + "loss": 1.244, + "step": 770 + }, + { + "epoch": 2.4, + "learning_rate": 1.937043238486329e-05, + "loss": 1.3051, + "step": 771 + }, + { + "epoch": 2.4, + "learning_rate": 1.9175876931371626e-05, + "loss": 1.2869, + "step": 772 + }, + { + "epoch": 2.4, + "learning_rate": 1.898219980147993e-05, + "loss": 1.3365, + "step": 773 + }, + { + "epoch": 2.41, + "learning_rate": 1.878940309989633e-05, + "loss": 1.3091, + "step": 774 + }, + { + "epoch": 2.41, + "learning_rate": 1.859748892176133e-05, + "loss": 1.3401, + "step": 775 + }, + { + "epoch": 2.41, + "learning_rate": 1.840645935262497e-05, + "loss": 1.3562, + "step": 776 + }, + { + "epoch": 2.42, + "learning_rate": 1.8216316468424098e-05, + "loss": 1.3201, + "step": 777 + }, + { + "epoch": 2.42, + "learning_rate": 1.8027062335459977e-05, + "loss": 1.2757, + "step": 778 + }, + { + "epoch": 2.42, + "learning_rate": 1.7838699010375625e-05, + "loss": 1.3541, + "step": 779 + }, + { + "epoch": 2.43, + "learning_rate": 
1.7651228540133623e-05, + "loss": 1.3491, + "step": 780 + }, + { + "epoch": 2.43, + "learning_rate": 1.7464652961993768e-05, + "loss": 1.2903, + "step": 781 + }, + { + "epoch": 2.43, + "learning_rate": 1.727897430349097e-05, + "loss": 1.3879, + "step": 782 + }, + { + "epoch": 2.44, + "learning_rate": 1.7094194582413326e-05, + "loss": 1.3311, + "step": 783 + }, + { + "epoch": 2.44, + "learning_rate": 1.6910315806779987e-05, + "loss": 1.34, + "step": 784 + }, + { + "epoch": 2.44, + "learning_rate": 1.6727339974819456e-05, + "loss": 1.3331, + "step": 785 + }, + { + "epoch": 2.44, + "learning_rate": 1.6545269074947922e-05, + "loss": 1.3164, + "step": 786 + }, + { + "epoch": 2.45, + "learning_rate": 1.636410508574753e-05, + "loss": 1.3505, + "step": 787 + }, + { + "epoch": 2.45, + "learning_rate": 1.618384997594494e-05, + "loss": 1.2556, + "step": 788 + }, + { + "epoch": 2.45, + "learning_rate": 1.6004505704389983e-05, + "loss": 1.3023, + "step": 789 + }, + { + "epoch": 2.46, + "learning_rate": 1.5826074220034226e-05, + "loss": 1.3524, + "step": 790 + }, + { + "epoch": 2.46, + "learning_rate": 1.5648557461910018e-05, + "loss": 1.3215, + "step": 791 + }, + { + "epoch": 2.46, + "learning_rate": 1.547195735910919e-05, + "loss": 1.3593, + "step": 792 + }, + { + "epoch": 2.47, + "learning_rate": 1.5296275830762206e-05, + "loss": 1.3482, + "step": 793 + }, + { + "epoch": 2.47, + "learning_rate": 1.5121514786017365e-05, + "loss": 1.3521, + "step": 794 + }, + { + "epoch": 2.47, + "learning_rate": 1.4947676124019839e-05, + "loss": 1.3138, + "step": 795 + }, + { + "epoch": 2.48, + "learning_rate": 1.4774761733891319e-05, + "loss": 1.3701, + "step": 796 + }, + { + "epoch": 2.48, + "learning_rate": 1.4602773494709254e-05, + "loss": 1.3408, + "step": 797 + }, + { + "epoch": 2.48, + "learning_rate": 1.4431713275486602e-05, + "loss": 1.343, + "step": 798 + }, + { + "epoch": 2.49, + "learning_rate": 1.4261582935151352e-05, + "loss": 1.2744, + "step": 799 + }, + { + "epoch": 2.49, + "learning_rate": 1.4092384322526442e-05, + "loss": 1.3453, + "step": 800 + }, + { + "epoch": 2.49, + "eval_loss": 1.6718111038208008, + "eval_runtime": 233.7605, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 800 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.2434173260136448e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-800/training_args.bin b/checkpoint-800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-850/README.md b/checkpoint-850/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-850/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff 
--git a/checkpoint-850/adapter_config.json b/checkpoint-850/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-850/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-850/adapter_model.bin b/checkpoint-850/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..6262b6fdb45d6362c872ff5444adaa29dcf3b08e --- /dev/null +++ b/checkpoint-850/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a686e7bf9ead131604c0946468b9b96e9c1b04ecc8e0712d3853b7f548cac68f +size 84046925 diff --git a/checkpoint-850/optimizer.pt b/checkpoint-850/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..9937b085a1e0a7f1958426c12dffd9e1b3851cd9 --- /dev/null +++ b/checkpoint-850/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2a3c6000255b5d7ab6050cacba171873e1478099822acd972cea61fca586f123 +size 168039557 diff --git a/checkpoint-850/rng_state.pth b/checkpoint-850/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..8cb3b7e69ee7732d99e6c7c1873eebe4d82f1613 --- /dev/null +++ b/checkpoint-850/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8b30bb8f643242903db9074752d816dc4784a95c39630c510b2c59127e92944 +size 14575 diff --git a/checkpoint-850/scheduler.pt b/checkpoint-850/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..da157db79d9db6aa24bf82448415275ccf0f2980 --- /dev/null +++ b/checkpoint-850/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e633213ce0ad48888071dc4463adef8c9d50580bce2b135ad39db4a150c84c8b +size 627 diff --git a/checkpoint-850/trainer_state.json b/checkpoint-850/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..3512065d4314b1ea47f663d42f4bb7c353278af2 --- /dev/null +++ b/checkpoint-850/trainer_state.json @@ -0,0 +1,5255 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.6438569206842923, + "eval_steps": 50, + "global_step": 850, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + 
"learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 
0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + 
"learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, 
+ "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 
0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { 
+ "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, 
+ "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { 
+ "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, 
+ "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 
0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 
1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 
0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { 
+ "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + 
"loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + 
}, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + 
"step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + 
"loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + 
"learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + 
"epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 
659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 
3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, + "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + }, + { + "epoch": 2.18, + "learning_rate": 3.5036396896657455e-05, + "loss": 1.2943, + "step": 701 + }, + { + "epoch": 2.18, + "learning_rate": 3.478613320033042e-05, + "loss": 1.3333, + "step": 702 + }, + { + "epoch": 2.19, + "learning_rate": 3.453657818943142e-05, + "loss": 1.2983, + "step": 703 + }, + { + "epoch": 2.19, + "learning_rate": 3.4287734575898975e-05, + "loss": 1.3392, + "step": 704 + }, + { + "epoch": 2.19, + "learning_rate": 3.403960506394092e-05, + "loss": 1.2677, + "step": 705 + }, + { + "epoch": 2.2, + "learning_rate": 3.379219235000463e-05, + "loss": 1.3197, + "step": 706 + }, + { + "epoch": 2.2, + "learning_rate": 3.3545499122748216e-05, + "loss": 1.3343, + "step": 707 + }, + { + "epoch": 2.2, + "learning_rate": 3.329952806301092e-05, + "loss": 1.3591, + "step": 708 + }, + { + "epoch": 2.21, + "learning_rate": 3.305428184378413e-05, + "loss": 1.3272, + "step": 709 + }, + { + "epoch": 2.21, + "learning_rate": 3.280976313018239e-05, + "loss": 1.3499, + "step": 710 + }, + { + "epoch": 2.21, + "learning_rate": 3.256597457941429e-05, + "loss": 1.3371, + "step": 711 + }, + { + "epoch": 2.21, + "learning_rate": 3.232291884075373e-05, + "loss": 1.312, + "step": 712 + }, + { + "epoch": 2.22, + "learning_rate": 3.208059855551101e-05, + "loss": 1.3502, + "step": 713 + }, + { + "epoch": 2.22, + "learning_rate": 3.18390163570042e-05, + "loss": 1.3094, + "step": 714 + }, + { + "epoch": 2.22, + "learning_rate": 3.1598174870530604e-05, + "loss": 1.3181, + "step": 715 + }, + { + "epoch": 2.23, + "learning_rate": 3.1358076713338014e-05, + "loss": 1.3011, + "step": 716 + }, + { + "epoch": 2.23, + "learning_rate": 3.1118724494596405e-05, + "loss": 1.3054, + "step": 717 + }, + { + "epoch": 2.23, + "learning_rate": 3.0880120815369694e-05, + "loss": 1.3215, + "step": 718 + }, + { + "epoch": 2.24, + "learning_rate": 3.0642268268587136e-05, + "loss": 1.2908, + "step": 719 + }, + { + "epoch": 2.24, + "learning_rate": 3.0405169439015557e-05, + "loss": 1.3334, + "step": 720 + }, + { + "epoch": 2.24, + "learning_rate": 3.0168826903230906e-05, + "loss": 1.3275, + "step": 721 + }, + { + "epoch": 2.25, + "learning_rate": 2.9933243229590568e-05, + "loss": 1.3329, + "step": 722 + }, + { + "epoch": 2.25, + "learning_rate": 2.969842097820519e-05, + "loss": 1.3185, + "step": 723 + }, + { + "epoch": 2.25, + "learning_rate": 2.9464362700910943e-05, + "loss": 1.3443, + "step": 724 + }, + { + "epoch": 2.26, + "learning_rate": 2.9231070941241988e-05, + "loss": 1.3034, + "step": 725 + }, + { + "epoch": 2.26, + "learning_rate": 2.899854823440241e-05, + "loss": 1.304, + "step": 726 + }, + { + "epoch": 2.26, + "learning_rate": 2.8766797107239164e-05, + "loss": 1.3136, + "step": 727 + }, + { + "epoch": 2.26, + "learning_rate": 2.8535820078214236e-05, + "loss": 1.2894, + "step": 728 + }, + { + 
"epoch": 2.27, + "learning_rate": 2.8305619657377413e-05, + "loss": 1.3303, + "step": 729 + }, + { + "epoch": 2.27, + "learning_rate": 2.8076198346339113e-05, + "loss": 1.3158, + "step": 730 + }, + { + "epoch": 2.27, + "learning_rate": 2.7847558638242964e-05, + "loss": 1.3071, + "step": 731 + }, + { + "epoch": 2.28, + "learning_rate": 2.7619703017738917e-05, + "loss": 1.2951, + "step": 732 + }, + { + "epoch": 2.28, + "learning_rate": 2.7392633960956127e-05, + "loss": 1.3138, + "step": 733 + }, + { + "epoch": 2.28, + "learning_rate": 2.7166353935476085e-05, + "loss": 1.3523, + "step": 734 + }, + { + "epoch": 2.29, + "learning_rate": 2.694086540030587e-05, + "loss": 1.2937, + "step": 735 + }, + { + "epoch": 2.29, + "learning_rate": 2.671617080585127e-05, + "loss": 1.3493, + "step": 736 + }, + { + "epoch": 2.29, + "learning_rate": 2.6492272593890267e-05, + "loss": 1.309, + "step": 737 + }, + { + "epoch": 2.3, + "learning_rate": 2.6269173197546527e-05, + "loss": 1.3188, + "step": 738 + }, + { + "epoch": 2.3, + "learning_rate": 2.6046875041262852e-05, + "loss": 1.3202, + "step": 739 + }, + { + "epoch": 2.3, + "learning_rate": 2.5825380540774914e-05, + "loss": 1.359, + "step": 740 + }, + { + "epoch": 2.3, + "learning_rate": 2.560469210308497e-05, + "loss": 1.2837, + "step": 741 + }, + { + "epoch": 2.31, + "learning_rate": 2.5384812126435697e-05, + "loss": 1.3195, + "step": 742 + }, + { + "epoch": 2.31, + "learning_rate": 2.5165743000284213e-05, + "loss": 1.2797, + "step": 743 + }, + { + "epoch": 2.31, + "learning_rate": 2.4947487105275945e-05, + "loss": 1.3656, + "step": 744 + }, + { + "epoch": 2.32, + "learning_rate": 2.4730046813218987e-05, + "loss": 1.3094, + "step": 745 + }, + { + "epoch": 2.32, + "learning_rate": 2.451342448705811e-05, + "loss": 1.3176, + "step": 746 + }, + { + "epoch": 2.32, + "learning_rate": 2.4297622480849104e-05, + "loss": 1.3318, + "step": 747 + }, + { + "epoch": 2.33, + "learning_rate": 2.408264313973343e-05, + "loss": 1.3367, + "step": 748 + }, + { + "epoch": 2.33, + "learning_rate": 2.3868488799912414e-05, + "loss": 1.2717, + "step": 749 + }, + { + "epoch": 2.33, + "learning_rate": 2.3655161788622138e-05, + "loss": 1.3328, + "step": 750 + }, + { + "epoch": 2.33, + "eval_loss": 1.6713805198669434, + "eval_runtime": 233.7116, + "eval_samples_per_second": 16.358, + "eval_steps_per_second": 4.091, + "step": 750 + }, + { + "epoch": 2.34, + "learning_rate": 2.344266442410794e-05, + "loss": 1.3325, + "step": 751 + }, + { + "epoch": 2.34, + "learning_rate": 2.323099901559931e-05, + "loss": 1.3277, + "step": 752 + }, + { + "epoch": 2.34, + "learning_rate": 2.302016786328488e-05, + "loss": 1.3567, + "step": 753 + }, + { + "epoch": 2.35, + "learning_rate": 2.281017325828716e-05, + "loss": 1.3087, + "step": 754 + }, + { + "epoch": 2.35, + "learning_rate": 2.260101748263803e-05, + "loss": 1.3173, + "step": 755 + }, + { + "epoch": 2.35, + "learning_rate": 2.2392702809253596e-05, + "loss": 1.3234, + "step": 756 + }, + { + "epoch": 2.35, + "learning_rate": 2.218523150190962e-05, + "loss": 1.3649, + "step": 757 + }, + { + "epoch": 2.36, + "learning_rate": 2.1978605815217025e-05, + "loss": 1.3433, + "step": 758 + }, + { + "epoch": 2.36, + "learning_rate": 2.177282799459719e-05, + "loss": 1.2992, + "step": 759 + }, + { + "epoch": 2.36, + "learning_rate": 2.1567900276257703e-05, + "loss": 1.3004, + "step": 760 + }, + { + "epoch": 2.37, + "learning_rate": 2.1363824887167993e-05, + "loss": 1.2894, + "step": 761 + }, + { + "epoch": 2.37, + "learning_rate": 2.1160604045035115e-05, + 
"loss": 1.3151, + "step": 762 + }, + { + "epoch": 2.37, + "learning_rate": 2.0958239958279756e-05, + "loss": 1.2694, + "step": 763 + }, + { + "epoch": 2.38, + "learning_rate": 2.0756734826012104e-05, + "loss": 1.2979, + "step": 764 + }, + { + "epoch": 2.38, + "learning_rate": 2.0556090838007957e-05, + "loss": 1.3187, + "step": 765 + }, + { + "epoch": 2.38, + "learning_rate": 2.0356310174685124e-05, + "loss": 1.3255, + "step": 766 + }, + { + "epoch": 2.39, + "learning_rate": 2.0157395007079428e-05, + "loss": 1.3623, + "step": 767 + }, + { + "epoch": 2.39, + "learning_rate": 1.9959347496821333e-05, + "loss": 1.317, + "step": 768 + }, + { + "epoch": 2.39, + "learning_rate": 1.9762169796112397e-05, + "loss": 1.3102, + "step": 769 + }, + { + "epoch": 2.4, + "learning_rate": 1.956586404770182e-05, + "loss": 1.244, + "step": 770 + }, + { + "epoch": 2.4, + "learning_rate": 1.937043238486329e-05, + "loss": 1.3051, + "step": 771 + }, + { + "epoch": 2.4, + "learning_rate": 1.9175876931371626e-05, + "loss": 1.2869, + "step": 772 + }, + { + "epoch": 2.4, + "learning_rate": 1.898219980147993e-05, + "loss": 1.3365, + "step": 773 + }, + { + "epoch": 2.41, + "learning_rate": 1.878940309989633e-05, + "loss": 1.3091, + "step": 774 + }, + { + "epoch": 2.41, + "learning_rate": 1.859748892176133e-05, + "loss": 1.3401, + "step": 775 + }, + { + "epoch": 2.41, + "learning_rate": 1.840645935262497e-05, + "loss": 1.3562, + "step": 776 + }, + { + "epoch": 2.42, + "learning_rate": 1.8216316468424098e-05, + "loss": 1.3201, + "step": 777 + }, + { + "epoch": 2.42, + "learning_rate": 1.8027062335459977e-05, + "loss": 1.2757, + "step": 778 + }, + { + "epoch": 2.42, + "learning_rate": 1.7838699010375625e-05, + "loss": 1.3541, + "step": 779 + }, + { + "epoch": 2.43, + "learning_rate": 1.7651228540133623e-05, + "loss": 1.3491, + "step": 780 + }, + { + "epoch": 2.43, + "learning_rate": 1.7464652961993768e-05, + "loss": 1.2903, + "step": 781 + }, + { + "epoch": 2.43, + "learning_rate": 1.727897430349097e-05, + "loss": 1.3879, + "step": 782 + }, + { + "epoch": 2.44, + "learning_rate": 1.7094194582413326e-05, + "loss": 1.3311, + "step": 783 + }, + { + "epoch": 2.44, + "learning_rate": 1.6910315806779987e-05, + "loss": 1.34, + "step": 784 + }, + { + "epoch": 2.44, + "learning_rate": 1.6727339974819456e-05, + "loss": 1.3331, + "step": 785 + }, + { + "epoch": 2.44, + "learning_rate": 1.6545269074947922e-05, + "loss": 1.3164, + "step": 786 + }, + { + "epoch": 2.45, + "learning_rate": 1.636410508574753e-05, + "loss": 1.3505, + "step": 787 + }, + { + "epoch": 2.45, + "learning_rate": 1.618384997594494e-05, + "loss": 1.2556, + "step": 788 + }, + { + "epoch": 2.45, + "learning_rate": 1.6004505704389983e-05, + "loss": 1.3023, + "step": 789 + }, + { + "epoch": 2.46, + "learning_rate": 1.5826074220034226e-05, + "loss": 1.3524, + "step": 790 + }, + { + "epoch": 2.46, + "learning_rate": 1.5648557461910018e-05, + "loss": 1.3215, + "step": 791 + }, + { + "epoch": 2.46, + "learning_rate": 1.547195735910919e-05, + "loss": 1.3593, + "step": 792 + }, + { + "epoch": 2.47, + "learning_rate": 1.5296275830762206e-05, + "loss": 1.3482, + "step": 793 + }, + { + "epoch": 2.47, + "learning_rate": 1.5121514786017365e-05, + "loss": 1.3521, + "step": 794 + }, + { + "epoch": 2.47, + "learning_rate": 1.4947676124019839e-05, + "loss": 1.3138, + "step": 795 + }, + { + "epoch": 2.48, + "learning_rate": 1.4774761733891319e-05, + "loss": 1.3701, + "step": 796 + }, + { + "epoch": 2.48, + "learning_rate": 1.4602773494709254e-05, + "loss": 1.3408, + "step": 797 + }, + 
{ + "epoch": 2.48, + "learning_rate": 1.4431713275486602e-05, + "loss": 1.343, + "step": 798 + }, + { + "epoch": 2.49, + "learning_rate": 1.4261582935151352e-05, + "loss": 1.2744, + "step": 799 + }, + { + "epoch": 2.49, + "learning_rate": 1.4092384322526442e-05, + "loss": 1.3453, + "step": 800 + }, + { + "epoch": 2.49, + "eval_loss": 1.6718111038208008, + "eval_runtime": 233.7605, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 800 + }, + { + "epoch": 2.49, + "learning_rate": 1.3924119276309677e-05, + "loss": 1.2647, + "step": 801 + }, + { + "epoch": 2.49, + "learning_rate": 1.3756789625053601e-05, + "loss": 1.321, + "step": 802 + }, + { + "epoch": 2.5, + "learning_rate": 1.3590397187145853e-05, + "loss": 1.3403, + "step": 803 + }, + { + "epoch": 2.5, + "learning_rate": 1.3424943770789211e-05, + "loss": 1.3191, + "step": 804 + }, + { + "epoch": 2.5, + "learning_rate": 1.3260431173982001e-05, + "loss": 1.2983, + "step": 805 + }, + { + "epoch": 2.51, + "learning_rate": 1.3096861184498643e-05, + "loss": 1.2955, + "step": 806 + }, + { + "epoch": 2.51, + "learning_rate": 1.293423557987009e-05, + "loss": 1.3297, + "step": 807 + }, + { + "epoch": 2.51, + "learning_rate": 1.2772556127364588e-05, + "loss": 1.3273, + "step": 808 + }, + { + "epoch": 2.52, + "learning_rate": 1.2611824583968457e-05, + "loss": 1.2867, + "step": 809 + }, + { + "epoch": 2.52, + "learning_rate": 1.2452042696366984e-05, + "loss": 1.3132, + "step": 810 + }, + { + "epoch": 2.52, + "learning_rate": 1.229321220092552e-05, + "loss": 1.323, + "step": 811 + }, + { + "epoch": 2.53, + "learning_rate": 1.2135334823670452e-05, + "loss": 1.3332, + "step": 812 + }, + { + "epoch": 2.53, + "learning_rate": 1.1978412280270568e-05, + "loss": 1.2775, + "step": 813 + }, + { + "epoch": 2.53, + "learning_rate": 1.182244627601845e-05, + "loss": 1.3049, + "step": 814 + }, + { + "epoch": 2.53, + "learning_rate": 1.1667438505811801e-05, + "loss": 1.3206, + "step": 815 + }, + { + "epoch": 2.54, + "learning_rate": 1.1513390654135103e-05, + "loss": 1.386, + "step": 816 + }, + { + "epoch": 2.54, + "learning_rate": 1.1360304395041343e-05, + "loss": 1.3292, + "step": 817 + }, + { + "epoch": 2.54, + "learning_rate": 1.1208181392133766e-05, + "loss": 1.3249, + "step": 818 + }, + { + "epoch": 2.55, + "learning_rate": 1.1057023298547864e-05, + "loss": 1.2934, + "step": 819 + }, + { + "epoch": 2.55, + "learning_rate": 1.0906831756933267e-05, + "loss": 1.3471, + "step": 820 + }, + { + "epoch": 2.55, + "learning_rate": 1.0757608399436125e-05, + "loss": 1.3505, + "step": 821 + }, + { + "epoch": 2.56, + "learning_rate": 1.0609354847681152e-05, + "loss": 1.283, + "step": 822 + }, + { + "epoch": 2.56, + "learning_rate": 1.0462072712754035e-05, + "loss": 1.2679, + "step": 823 + }, + { + "epoch": 2.56, + "learning_rate": 1.0315763595184113e-05, + "loss": 1.3317, + "step": 824 + }, + { + "epoch": 2.57, + "learning_rate": 1.0170429084926746e-05, + "loss": 1.308, + "step": 825 + }, + { + "epoch": 2.57, + "learning_rate": 1.0026070761346229e-05, + "loss": 1.2816, + "step": 826 + }, + { + "epoch": 2.57, + "learning_rate": 9.882690193198463e-06, + "loss": 1.2712, + "step": 827 + }, + { + "epoch": 2.58, + "learning_rate": 9.740288938613995e-06, + "loss": 1.3133, + "step": 828 + }, + { + "epoch": 2.58, + "learning_rate": 9.598868545081153e-06, + "loss": 1.257, + "step": 829 + }, + { + "epoch": 2.58, + "learning_rate": 9.458430549429032e-06, + "loss": 1.3271, + "step": 830 + }, + { + "epoch": 2.58, + "learning_rate": 9.318976477811026e-06, + 
"loss": 1.3329, + "step": 831 + }, + { + "epoch": 2.59, + "learning_rate": 9.18050784568808e-06, + "loss": 1.2939, + "step": 832 + }, + { + "epoch": 2.59, + "learning_rate": 9.043026157812229e-06, + "loss": 1.3111, + "step": 833 + }, + { + "epoch": 2.59, + "learning_rate": 8.906532908210396e-06, + "loss": 1.3164, + "step": 834 + }, + { + "epoch": 2.6, + "learning_rate": 8.771029580167967e-06, + "loss": 1.3162, + "step": 835 + }, + { + "epoch": 2.6, + "learning_rate": 8.636517646212761e-06, + "loss": 1.303, + "step": 836 + }, + { + "epoch": 2.6, + "learning_rate": 8.502998568099063e-06, + "loss": 1.3545, + "step": 837 + }, + { + "epoch": 2.61, + "learning_rate": 8.370473796791622e-06, + "loss": 1.3224, + "step": 838 + }, + { + "epoch": 2.61, + "learning_rate": 8.238944772450064e-06, + "loss": 1.3146, + "step": 839 + }, + { + "epoch": 2.61, + "learning_rate": 8.108412924413056e-06, + "loss": 1.3171, + "step": 840 + }, + { + "epoch": 2.62, + "learning_rate": 7.978879671182848e-06, + "loss": 1.3209, + "step": 841 + }, + { + "epoch": 2.62, + "learning_rate": 7.850346420409949e-06, + "loss": 1.3143, + "step": 842 + }, + { + "epoch": 2.62, + "learning_rate": 7.722814568877646e-06, + "loss": 1.3112, + "step": 843 + }, + { + "epoch": 2.63, + "learning_rate": 7.596285502486966e-06, + "loss": 1.3056, + "step": 844 + }, + { + "epoch": 2.63, + "learning_rate": 7.4707605962415775e-06, + "loss": 1.3151, + "step": 845 + }, + { + "epoch": 2.63, + "learning_rate": 7.346241214232819e-06, + "loss": 1.3774, + "step": 846 + }, + { + "epoch": 2.63, + "learning_rate": 7.222728709624949e-06, + "loss": 1.3432, + "step": 847 + }, + { + "epoch": 2.64, + "learning_rate": 7.100224424640312e-06, + "loss": 1.3036, + "step": 848 + }, + { + "epoch": 2.64, + "learning_rate": 6.978729690544927e-06, + "loss": 1.2911, + "step": 849 + }, + { + "epoch": 2.64, + "learning_rate": 6.858245827633869e-06, + "loss": 1.3458, + "step": 850 + }, + { + "epoch": 2.64, + "eval_loss": 1.6725014448165894, + "eval_runtime": 233.7534, + "eval_samples_per_second": 16.355, + "eval_steps_per_second": 4.09, + "step": 850 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.3836309088894976e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-850/training_args.bin b/checkpoint-850/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-850/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-900/README.md b/checkpoint-900/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-900/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-900/adapter_config.json b/checkpoint-900/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-900/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-900/adapter_model.bin b/checkpoint-900/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..dc9cde3c980ace86b621fef25aaf1a04ca54b3c0 --- /dev/null +++ b/checkpoint-900/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd6bfcbfc13360672dffae164f83ae420a762053e23229d289f8d2c40c1edeeb +size 84046925 diff --git a/checkpoint-900/optimizer.pt b/checkpoint-900/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..788322d19e564fc787c032b751531cb594905e8e --- /dev/null +++ b/checkpoint-900/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04ba45b8efa7a87749069a682a216e684c585cec484ea18f1f57a6a46d2af0c0 +size 168039557 diff --git a/checkpoint-900/rng_state.pth b/checkpoint-900/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..e6c1231c81c6817a7cbc662e7cf62104f850df88 --- /dev/null +++ b/checkpoint-900/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e18df90d46286cfa318bfd9d4a9a9fa83fa0c138ab13a0f5ac0f98c0d4f7bf31 +size 14575 diff --git a/checkpoint-900/scheduler.pt b/checkpoint-900/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..7e41da3d4708396959c7bd9a905ec98f69cf6498 --- /dev/null +++ b/checkpoint-900/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9712304e87f49f09c3620cd836c7601bda9396f1d511b39ce5ef6e02d3b31e63 +size 627 diff --git a/checkpoint-900/trainer_state.json b/checkpoint-900/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..dd885d996b7e95f372ec4958993b18904c3b392d --- /dev/null +++ b/checkpoint-900/trainer_state.json @@ -0,0 +1,5563 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.7993779160186625, + "eval_steps": 50, + "global_step": 900, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 
1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, 
+ "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 
0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, + "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + 
"learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 
0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { 
+ "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, 
+ "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + "step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { 
+ "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, 
+ "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 
0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 
1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 
0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + "loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + 
{ + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + 
"loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + 
}, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + 
"step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + 
"loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + "loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + 
"learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + 
"epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, + { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 
660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 
3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, + "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + }, + { + "epoch": 2.18, + "learning_rate": 3.5036396896657455e-05, + "loss": 1.2943, + "step": 701 + }, + { + "epoch": 2.18, + "learning_rate": 3.478613320033042e-05, + "loss": 1.3333, + "step": 702 + }, + { + "epoch": 2.19, + "learning_rate": 3.453657818943142e-05, + "loss": 1.2983, + "step": 703 + }, + { + "epoch": 2.19, + "learning_rate": 3.4287734575898975e-05, + "loss": 1.3392, + "step": 704 + }, + { + "epoch": 2.19, + "learning_rate": 3.403960506394092e-05, + "loss": 1.2677, + "step": 705 + }, + { + "epoch": 2.2, + "learning_rate": 3.379219235000463e-05, + "loss": 1.3197, + "step": 706 + }, + { + "epoch": 2.2, + "learning_rate": 3.3545499122748216e-05, + "loss": 1.3343, + "step": 707 + }, + { + "epoch": 2.2, + "learning_rate": 3.329952806301092e-05, + "loss": 1.3591, + "step": 708 + }, + { + "epoch": 2.21, + "learning_rate": 3.305428184378413e-05, + "loss": 1.3272, + "step": 709 + }, + { + "epoch": 2.21, + "learning_rate": 3.280976313018239e-05, + "loss": 1.3499, + "step": 710 + }, + { + "epoch": 2.21, + "learning_rate": 3.256597457941429e-05, + "loss": 1.3371, + "step": 711 + }, + { + "epoch": 2.21, + "learning_rate": 3.232291884075373e-05, + "loss": 1.312, + "step": 712 + }, + { + "epoch": 2.22, + "learning_rate": 3.208059855551101e-05, + "loss": 1.3502, + "step": 713 + }, + { + "epoch": 2.22, + "learning_rate": 3.18390163570042e-05, + "loss": 1.3094, + "step": 714 + }, + { + "epoch": 2.22, + "learning_rate": 3.1598174870530604e-05, + "loss": 1.3181, + "step": 715 + }, + { + "epoch": 2.23, + "learning_rate": 3.1358076713338014e-05, + "loss": 1.3011, + "step": 716 + }, + { + "epoch": 2.23, + "learning_rate": 3.1118724494596405e-05, + "loss": 1.3054, + "step": 717 + }, + { + "epoch": 2.23, + "learning_rate": 3.0880120815369694e-05, + "loss": 1.3215, + "step": 718 + }, + { + "epoch": 2.24, + "learning_rate": 3.0642268268587136e-05, + "loss": 1.2908, + "step": 719 + }, + { + "epoch": 2.24, + "learning_rate": 3.0405169439015557e-05, + "loss": 1.3334, + "step": 720 + }, + { + "epoch": 2.24, + "learning_rate": 3.0168826903230906e-05, + "loss": 1.3275, + "step": 721 + }, + { + "epoch": 2.25, + "learning_rate": 2.9933243229590568e-05, + "loss": 1.3329, + "step": 722 + }, + { + "epoch": 2.25, + "learning_rate": 2.969842097820519e-05, + "loss": 1.3185, + "step": 723 + }, + { + "epoch": 2.25, + "learning_rate": 2.9464362700910943e-05, + "loss": 1.3443, + "step": 724 + }, + { + "epoch": 2.26, + "learning_rate": 2.9231070941241988e-05, + "loss": 1.3034, + "step": 725 + }, + { + "epoch": 2.26, + "learning_rate": 2.899854823440241e-05, + "loss": 1.304, + "step": 726 + }, + { + "epoch": 2.26, + "learning_rate": 2.8766797107239164e-05, + "loss": 1.3136, + "step": 727 + }, + { + "epoch": 2.26, + "learning_rate": 2.8535820078214236e-05, + "loss": 1.2894, + "step": 728 + }, + { + "epoch": 2.27, + "learning_rate": 2.8305619657377413e-05, + "loss": 1.3303, + "step": 729 + }, + { + 
"epoch": 2.27, + "learning_rate": 2.8076198346339113e-05, + "loss": 1.3158, + "step": 730 + }, + { + "epoch": 2.27, + "learning_rate": 2.7847558638242964e-05, + "loss": 1.3071, + "step": 731 + }, + { + "epoch": 2.28, + "learning_rate": 2.7619703017738917e-05, + "loss": 1.2951, + "step": 732 + }, + { + "epoch": 2.28, + "learning_rate": 2.7392633960956127e-05, + "loss": 1.3138, + "step": 733 + }, + { + "epoch": 2.28, + "learning_rate": 2.7166353935476085e-05, + "loss": 1.3523, + "step": 734 + }, + { + "epoch": 2.29, + "learning_rate": 2.694086540030587e-05, + "loss": 1.2937, + "step": 735 + }, + { + "epoch": 2.29, + "learning_rate": 2.671617080585127e-05, + "loss": 1.3493, + "step": 736 + }, + { + "epoch": 2.29, + "learning_rate": 2.6492272593890267e-05, + "loss": 1.309, + "step": 737 + }, + { + "epoch": 2.3, + "learning_rate": 2.6269173197546527e-05, + "loss": 1.3188, + "step": 738 + }, + { + "epoch": 2.3, + "learning_rate": 2.6046875041262852e-05, + "loss": 1.3202, + "step": 739 + }, + { + "epoch": 2.3, + "learning_rate": 2.5825380540774914e-05, + "loss": 1.359, + "step": 740 + }, + { + "epoch": 2.3, + "learning_rate": 2.560469210308497e-05, + "loss": 1.2837, + "step": 741 + }, + { + "epoch": 2.31, + "learning_rate": 2.5384812126435697e-05, + "loss": 1.3195, + "step": 742 + }, + { + "epoch": 2.31, + "learning_rate": 2.5165743000284213e-05, + "loss": 1.2797, + "step": 743 + }, + { + "epoch": 2.31, + "learning_rate": 2.4947487105275945e-05, + "loss": 1.3656, + "step": 744 + }, + { + "epoch": 2.32, + "learning_rate": 2.4730046813218987e-05, + "loss": 1.3094, + "step": 745 + }, + { + "epoch": 2.32, + "learning_rate": 2.451342448705811e-05, + "loss": 1.3176, + "step": 746 + }, + { + "epoch": 2.32, + "learning_rate": 2.4297622480849104e-05, + "loss": 1.3318, + "step": 747 + }, + { + "epoch": 2.33, + "learning_rate": 2.408264313973343e-05, + "loss": 1.3367, + "step": 748 + }, + { + "epoch": 2.33, + "learning_rate": 2.3868488799912414e-05, + "loss": 1.2717, + "step": 749 + }, + { + "epoch": 2.33, + "learning_rate": 2.3655161788622138e-05, + "loss": 1.3328, + "step": 750 + }, + { + "epoch": 2.33, + "eval_loss": 1.6713805198669434, + "eval_runtime": 233.7116, + "eval_samples_per_second": 16.358, + "eval_steps_per_second": 4.091, + "step": 750 + }, + { + "epoch": 2.34, + "learning_rate": 2.344266442410794e-05, + "loss": 1.3325, + "step": 751 + }, + { + "epoch": 2.34, + "learning_rate": 2.323099901559931e-05, + "loss": 1.3277, + "step": 752 + }, + { + "epoch": 2.34, + "learning_rate": 2.302016786328488e-05, + "loss": 1.3567, + "step": 753 + }, + { + "epoch": 2.35, + "learning_rate": 2.281017325828716e-05, + "loss": 1.3087, + "step": 754 + }, + { + "epoch": 2.35, + "learning_rate": 2.260101748263803e-05, + "loss": 1.3173, + "step": 755 + }, + { + "epoch": 2.35, + "learning_rate": 2.2392702809253596e-05, + "loss": 1.3234, + "step": 756 + }, + { + "epoch": 2.35, + "learning_rate": 2.218523150190962e-05, + "loss": 1.3649, + "step": 757 + }, + { + "epoch": 2.36, + "learning_rate": 2.1978605815217025e-05, + "loss": 1.3433, + "step": 758 + }, + { + "epoch": 2.36, + "learning_rate": 2.177282799459719e-05, + "loss": 1.2992, + "step": 759 + }, + { + "epoch": 2.36, + "learning_rate": 2.1567900276257703e-05, + "loss": 1.3004, + "step": 760 + }, + { + "epoch": 2.37, + "learning_rate": 2.1363824887167993e-05, + "loss": 1.2894, + "step": 761 + }, + { + "epoch": 2.37, + "learning_rate": 2.1160604045035115e-05, + "loss": 1.3151, + "step": 762 + }, + { + "epoch": 2.37, + "learning_rate": 2.0958239958279756e-05, + 
"loss": 1.2694, + "step": 763 + }, + { + "epoch": 2.38, + "learning_rate": 2.0756734826012104e-05, + "loss": 1.2979, + "step": 764 + }, + { + "epoch": 2.38, + "learning_rate": 2.0556090838007957e-05, + "loss": 1.3187, + "step": 765 + }, + { + "epoch": 2.38, + "learning_rate": 2.0356310174685124e-05, + "loss": 1.3255, + "step": 766 + }, + { + "epoch": 2.39, + "learning_rate": 2.0157395007079428e-05, + "loss": 1.3623, + "step": 767 + }, + { + "epoch": 2.39, + "learning_rate": 1.9959347496821333e-05, + "loss": 1.317, + "step": 768 + }, + { + "epoch": 2.39, + "learning_rate": 1.9762169796112397e-05, + "loss": 1.3102, + "step": 769 + }, + { + "epoch": 2.4, + "learning_rate": 1.956586404770182e-05, + "loss": 1.244, + "step": 770 + }, + { + "epoch": 2.4, + "learning_rate": 1.937043238486329e-05, + "loss": 1.3051, + "step": 771 + }, + { + "epoch": 2.4, + "learning_rate": 1.9175876931371626e-05, + "loss": 1.2869, + "step": 772 + }, + { + "epoch": 2.4, + "learning_rate": 1.898219980147993e-05, + "loss": 1.3365, + "step": 773 + }, + { + "epoch": 2.41, + "learning_rate": 1.878940309989633e-05, + "loss": 1.3091, + "step": 774 + }, + { + "epoch": 2.41, + "learning_rate": 1.859748892176133e-05, + "loss": 1.3401, + "step": 775 + }, + { + "epoch": 2.41, + "learning_rate": 1.840645935262497e-05, + "loss": 1.3562, + "step": 776 + }, + { + "epoch": 2.42, + "learning_rate": 1.8216316468424098e-05, + "loss": 1.3201, + "step": 777 + }, + { + "epoch": 2.42, + "learning_rate": 1.8027062335459977e-05, + "loss": 1.2757, + "step": 778 + }, + { + "epoch": 2.42, + "learning_rate": 1.7838699010375625e-05, + "loss": 1.3541, + "step": 779 + }, + { + "epoch": 2.43, + "learning_rate": 1.7651228540133623e-05, + "loss": 1.3491, + "step": 780 + }, + { + "epoch": 2.43, + "learning_rate": 1.7464652961993768e-05, + "loss": 1.2903, + "step": 781 + }, + { + "epoch": 2.43, + "learning_rate": 1.727897430349097e-05, + "loss": 1.3879, + "step": 782 + }, + { + "epoch": 2.44, + "learning_rate": 1.7094194582413326e-05, + "loss": 1.3311, + "step": 783 + }, + { + "epoch": 2.44, + "learning_rate": 1.6910315806779987e-05, + "loss": 1.34, + "step": 784 + }, + { + "epoch": 2.44, + "learning_rate": 1.6727339974819456e-05, + "loss": 1.3331, + "step": 785 + }, + { + "epoch": 2.44, + "learning_rate": 1.6545269074947922e-05, + "loss": 1.3164, + "step": 786 + }, + { + "epoch": 2.45, + "learning_rate": 1.636410508574753e-05, + "loss": 1.3505, + "step": 787 + }, + { + "epoch": 2.45, + "learning_rate": 1.618384997594494e-05, + "loss": 1.2556, + "step": 788 + }, + { + "epoch": 2.45, + "learning_rate": 1.6004505704389983e-05, + "loss": 1.3023, + "step": 789 + }, + { + "epoch": 2.46, + "learning_rate": 1.5826074220034226e-05, + "loss": 1.3524, + "step": 790 + }, + { + "epoch": 2.46, + "learning_rate": 1.5648557461910018e-05, + "loss": 1.3215, + "step": 791 + }, + { + "epoch": 2.46, + "learning_rate": 1.547195735910919e-05, + "loss": 1.3593, + "step": 792 + }, + { + "epoch": 2.47, + "learning_rate": 1.5296275830762206e-05, + "loss": 1.3482, + "step": 793 + }, + { + "epoch": 2.47, + "learning_rate": 1.5121514786017365e-05, + "loss": 1.3521, + "step": 794 + }, + { + "epoch": 2.47, + "learning_rate": 1.4947676124019839e-05, + "loss": 1.3138, + "step": 795 + }, + { + "epoch": 2.48, + "learning_rate": 1.4774761733891319e-05, + "loss": 1.3701, + "step": 796 + }, + { + "epoch": 2.48, + "learning_rate": 1.4602773494709254e-05, + "loss": 1.3408, + "step": 797 + }, + { + "epoch": 2.48, + "learning_rate": 1.4431713275486602e-05, + "loss": 1.343, + "step": 798 + }, + { 
+ "epoch": 2.49, + "learning_rate": 1.4261582935151352e-05, + "loss": 1.2744, + "step": 799 + }, + { + "epoch": 2.49, + "learning_rate": 1.4092384322526442e-05, + "loss": 1.3453, + "step": 800 + }, + { + "epoch": 2.49, + "eval_loss": 1.6718111038208008, + "eval_runtime": 233.7605, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 800 + }, + { + "epoch": 2.49, + "learning_rate": 1.3924119276309677e-05, + "loss": 1.2647, + "step": 801 + }, + { + "epoch": 2.49, + "learning_rate": 1.3756789625053601e-05, + "loss": 1.321, + "step": 802 + }, + { + "epoch": 2.5, + "learning_rate": 1.3590397187145853e-05, + "loss": 1.3403, + "step": 803 + }, + { + "epoch": 2.5, + "learning_rate": 1.3424943770789211e-05, + "loss": 1.3191, + "step": 804 + }, + { + "epoch": 2.5, + "learning_rate": 1.3260431173982001e-05, + "loss": 1.2983, + "step": 805 + }, + { + "epoch": 2.51, + "learning_rate": 1.3096861184498643e-05, + "loss": 1.2955, + "step": 806 + }, + { + "epoch": 2.51, + "learning_rate": 1.293423557987009e-05, + "loss": 1.3297, + "step": 807 + }, + { + "epoch": 2.51, + "learning_rate": 1.2772556127364588e-05, + "loss": 1.3273, + "step": 808 + }, + { + "epoch": 2.52, + "learning_rate": 1.2611824583968457e-05, + "loss": 1.2867, + "step": 809 + }, + { + "epoch": 2.52, + "learning_rate": 1.2452042696366984e-05, + "loss": 1.3132, + "step": 810 + }, + { + "epoch": 2.52, + "learning_rate": 1.229321220092552e-05, + "loss": 1.323, + "step": 811 + }, + { + "epoch": 2.53, + "learning_rate": 1.2135334823670452e-05, + "loss": 1.3332, + "step": 812 + }, + { + "epoch": 2.53, + "learning_rate": 1.1978412280270568e-05, + "loss": 1.2775, + "step": 813 + }, + { + "epoch": 2.53, + "learning_rate": 1.182244627601845e-05, + "loss": 1.3049, + "step": 814 + }, + { + "epoch": 2.53, + "learning_rate": 1.1667438505811801e-05, + "loss": 1.3206, + "step": 815 + }, + { + "epoch": 2.54, + "learning_rate": 1.1513390654135103e-05, + "loss": 1.386, + "step": 816 + }, + { + "epoch": 2.54, + "learning_rate": 1.1360304395041343e-05, + "loss": 1.3292, + "step": 817 + }, + { + "epoch": 2.54, + "learning_rate": 1.1208181392133766e-05, + "loss": 1.3249, + "step": 818 + }, + { + "epoch": 2.55, + "learning_rate": 1.1057023298547864e-05, + "loss": 1.2934, + "step": 819 + }, + { + "epoch": 2.55, + "learning_rate": 1.0906831756933267e-05, + "loss": 1.3471, + "step": 820 + }, + { + "epoch": 2.55, + "learning_rate": 1.0757608399436125e-05, + "loss": 1.3505, + "step": 821 + }, + { + "epoch": 2.56, + "learning_rate": 1.0609354847681152e-05, + "loss": 1.283, + "step": 822 + }, + { + "epoch": 2.56, + "learning_rate": 1.0462072712754035e-05, + "loss": 1.2679, + "step": 823 + }, + { + "epoch": 2.56, + "learning_rate": 1.0315763595184113e-05, + "loss": 1.3317, + "step": 824 + }, + { + "epoch": 2.57, + "learning_rate": 1.0170429084926746e-05, + "loss": 1.308, + "step": 825 + }, + { + "epoch": 2.57, + "learning_rate": 1.0026070761346229e-05, + "loss": 1.2816, + "step": 826 + }, + { + "epoch": 2.57, + "learning_rate": 9.882690193198463e-06, + "loss": 1.2712, + "step": 827 + }, + { + "epoch": 2.58, + "learning_rate": 9.740288938613995e-06, + "loss": 1.3133, + "step": 828 + }, + { + "epoch": 2.58, + "learning_rate": 9.598868545081153e-06, + "loss": 1.257, + "step": 829 + }, + { + "epoch": 2.58, + "learning_rate": 9.458430549429032e-06, + "loss": 1.3271, + "step": 830 + }, + { + "epoch": 2.58, + "learning_rate": 9.318976477811026e-06, + "loss": 1.3329, + "step": 831 + }, + { + "epoch": 2.59, + "learning_rate": 9.18050784568808e-06, + 
"loss": 1.2939, + "step": 832 + }, + { + "epoch": 2.59, + "learning_rate": 9.043026157812229e-06, + "loss": 1.3111, + "step": 833 + }, + { + "epoch": 2.59, + "learning_rate": 8.906532908210396e-06, + "loss": 1.3164, + "step": 834 + }, + { + "epoch": 2.6, + "learning_rate": 8.771029580167967e-06, + "loss": 1.3162, + "step": 835 + }, + { + "epoch": 2.6, + "learning_rate": 8.636517646212761e-06, + "loss": 1.303, + "step": 836 + }, + { + "epoch": 2.6, + "learning_rate": 8.502998568099063e-06, + "loss": 1.3545, + "step": 837 + }, + { + "epoch": 2.61, + "learning_rate": 8.370473796791622e-06, + "loss": 1.3224, + "step": 838 + }, + { + "epoch": 2.61, + "learning_rate": 8.238944772450064e-06, + "loss": 1.3146, + "step": 839 + }, + { + "epoch": 2.61, + "learning_rate": 8.108412924413056e-06, + "loss": 1.3171, + "step": 840 + }, + { + "epoch": 2.62, + "learning_rate": 7.978879671182848e-06, + "loss": 1.3209, + "step": 841 + }, + { + "epoch": 2.62, + "learning_rate": 7.850346420409949e-06, + "loss": 1.3143, + "step": 842 + }, + { + "epoch": 2.62, + "learning_rate": 7.722814568877646e-06, + "loss": 1.3112, + "step": 843 + }, + { + "epoch": 2.63, + "learning_rate": 7.596285502486966e-06, + "loss": 1.3056, + "step": 844 + }, + { + "epoch": 2.63, + "learning_rate": 7.4707605962415775e-06, + "loss": 1.3151, + "step": 845 + }, + { + "epoch": 2.63, + "learning_rate": 7.346241214232819e-06, + "loss": 1.3774, + "step": 846 + }, + { + "epoch": 2.63, + "learning_rate": 7.222728709624949e-06, + "loss": 1.3432, + "step": 847 + }, + { + "epoch": 2.64, + "learning_rate": 7.100224424640312e-06, + "loss": 1.3036, + "step": 848 + }, + { + "epoch": 2.64, + "learning_rate": 6.978729690544927e-06, + "loss": 1.2911, + "step": 849 + }, + { + "epoch": 2.64, + "learning_rate": 6.858245827633869e-06, + "loss": 1.3458, + "step": 850 + }, + { + "epoch": 2.64, + "eval_loss": 1.6725014448165894, + "eval_runtime": 233.7534, + "eval_samples_per_second": 16.355, + "eval_steps_per_second": 4.09, + "step": 850 + }, + { + "epoch": 2.65, + "learning_rate": 6.7387741452169415e-06, + "loss": 1.2943, + "step": 851 + }, + { + "epoch": 2.65, + "learning_rate": 6.6203159416045605e-06, + "loss": 1.3108, + "step": 852 + }, + { + "epoch": 2.65, + "learning_rate": 6.502872504093527e-06, + "loss": 1.2836, + "step": 853 + }, + { + "epoch": 2.66, + "learning_rate": 6.3864451089530985e-06, + "loss": 1.3342, + "step": 854 + }, + { + "epoch": 2.66, + "learning_rate": 6.271035021411098e-06, + "loss": 1.304, + "step": 855 + }, + { + "epoch": 2.66, + "learning_rate": 6.156643495640157e-06, + "loss": 1.3163, + "step": 856 + }, + { + "epoch": 2.67, + "learning_rate": 6.043271774744086e-06, + "loss": 1.3385, + "step": 857 + }, + { + "epoch": 2.67, + "learning_rate": 5.930921090744402e-06, + "loss": 1.2856, + "step": 858 + }, + { + "epoch": 2.67, + "learning_rate": 5.81959266456692e-06, + "loss": 1.3414, + "step": 859 + }, + { + "epoch": 2.67, + "learning_rate": 5.709287706028454e-06, + "loss": 1.3353, + "step": 860 + }, + { + "epoch": 2.68, + "learning_rate": 5.600007413823693e-06, + "loss": 1.3286, + "step": 861 + }, + { + "epoch": 2.68, + "learning_rate": 5.491752975512232e-06, + "loss": 1.3089, + "step": 862 + }, + { + "epoch": 2.68, + "learning_rate": 5.38452556750555e-06, + "loss": 1.3385, + "step": 863 + }, + { + "epoch": 2.69, + "learning_rate": 5.278326355054308e-06, + "loss": 1.34, + "step": 864 + }, + { + "epoch": 2.69, + "learning_rate": 5.173156492235665e-06, + "loss": 1.328, + "step": 865 + }, + { + "epoch": 2.69, + "learning_rate": 
5.069017121940733e-06, + "loss": 1.2771, + "step": 866 + }, + { + "epoch": 2.7, + "learning_rate": 4.96590937586221e-06, + "loss": 1.2927, + "step": 867 + }, + { + "epoch": 2.7, + "learning_rate": 4.863834374481946e-06, + "loss": 1.2976, + "step": 868 + }, + { + "epoch": 2.7, + "learning_rate": 4.762793227058915e-06, + "loss": 1.3291, + "step": 869 + }, + { + "epoch": 2.71, + "learning_rate": 4.662787031617122e-06, + "loss": 1.3162, + "step": 870 + }, + { + "epoch": 2.71, + "learning_rate": 4.563816874933547e-06, + "loss": 1.287, + "step": 871 + }, + { + "epoch": 2.71, + "learning_rate": 4.465883832526552e-06, + "loss": 1.3419, + "step": 872 + }, + { + "epoch": 2.72, + "learning_rate": 4.368988968644006e-06, + "loss": 1.3645, + "step": 873 + }, + { + "epoch": 2.72, + "learning_rate": 4.2731333362518e-06, + "loss": 1.3361, + "step": 874 + }, + { + "epoch": 2.72, + "learning_rate": 4.1783179770224275e-06, + "loss": 1.3006, + "step": 875 + }, + { + "epoch": 2.72, + "learning_rate": 4.084543921323591e-06, + "loss": 1.2943, + "step": 876 + }, + { + "epoch": 2.73, + "learning_rate": 3.991812188207112e-06, + "loss": 1.3161, + "step": 877 + }, + { + "epoch": 2.73, + "learning_rate": 3.90012378539768e-06, + "loss": 1.3414, + "step": 878 + }, + { + "epoch": 2.73, + "learning_rate": 3.8094797092821264e-06, + "loss": 1.3094, + "step": 879 + }, + { + "epoch": 2.74, + "learning_rate": 3.7198809448984128e-06, + "loss": 1.2949, + "step": 880 + }, + { + "epoch": 2.74, + "learning_rate": 3.6313284659250215e-06, + "loss": 1.3336, + "step": 881 + }, + { + "epoch": 2.74, + "learning_rate": 3.5438232346703627e-06, + "loss": 1.3238, + "step": 882 + }, + { + "epoch": 2.75, + "learning_rate": 3.457366202062284e-06, + "loss": 1.3209, + "step": 883 + }, + { + "epoch": 2.75, + "learning_rate": 3.371958307637746e-06, + "loss": 1.352, + "step": 884 + }, + { + "epoch": 2.75, + "learning_rate": 3.287600479532649e-06, + "loss": 1.3234, + "step": 885 + }, + { + "epoch": 2.76, + "learning_rate": 3.204293634471689e-06, + "loss": 1.2995, + "step": 886 + }, + { + "epoch": 2.76, + "learning_rate": 3.1220386777584764e-06, + "loss": 1.3228, + "step": 887 + }, + { + "epoch": 2.76, + "learning_rate": 3.0408365032656093e-06, + "loss": 1.3059, + "step": 888 + }, + { + "epoch": 2.77, + "learning_rate": 2.960687993425004e-06, + "loss": 1.2848, + "step": 889 + }, + { + "epoch": 2.77, + "learning_rate": 2.8815940192183033e-06, + "loss": 1.3639, + "step": 890 + }, + { + "epoch": 2.77, + "learning_rate": 2.803555440167427e-06, + "loss": 1.3454, + "step": 891 + }, + { + "epoch": 2.77, + "learning_rate": 2.7265731043251807e-06, + "loss": 1.317, + "step": 892 + }, + { + "epoch": 2.78, + "learning_rate": 2.6506478482661077e-06, + "loss": 1.3105, + "step": 893 + }, + { + "epoch": 2.78, + "learning_rate": 2.575780497077307e-06, + "loss": 1.2993, + "step": 894 + }, + { + "epoch": 2.78, + "learning_rate": 2.501971864349606e-06, + "loss": 1.3332, + "step": 895 + }, + { + "epoch": 2.79, + "learning_rate": 2.429222752168547e-06, + "loss": 1.3281, + "step": 896 + }, + { + "epoch": 2.79, + "learning_rate": 2.357533951105839e-06, + "loss": 1.3206, + "step": 897 + }, + { + "epoch": 2.79, + "learning_rate": 2.28690624021064e-06, + "loss": 1.3227, + "step": 898 + }, + { + "epoch": 2.8, + "learning_rate": 2.217340387001121e-06, + "loss": 1.3449, + "step": 899 + }, + { + "epoch": 2.8, + "learning_rate": 2.1488371474562063e-06, + "loss": 1.3016, + "step": 900 + }, + { + "epoch": 2.8, + "eval_loss": 1.6737442016601562, + "eval_runtime": 233.8114, + 
"eval_samples_per_second": 16.351, + "eval_steps_per_second": 4.089, + "step": 900 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.5238444917653504e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-900/training_args.bin b/checkpoint-900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/checkpoint-950/README.md b/checkpoint-950/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e3637f645b79c1dff559d466047b102e3892f5d --- /dev/null +++ b/checkpoint-950/README.md @@ -0,0 +1,21 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: False +- load_in_4bit: True +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: nf4 +- bnb_4bit_use_double_quant: True +- bnb_4bit_compute_dtype: bfloat16 +### Framework versions + + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-950/adapter_config.json b/checkpoint-950/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..70eb98996765a9a8543304018a2698a5bf8a4fce --- /dev/null +++ b/checkpoint-950/adapter_config.json @@ -0,0 +1,28 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "./mistralai_Mistral-7B-v0.1", + "bias": "none", + "fan_in_fan_out": null, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.05, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "o_proj", + "k_proj", + "up_proj", + "v_proj", + "q_proj", + "gate_proj", + "down_proj" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-950/adapter_model.bin b/checkpoint-950/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..7fb03a20955b86701b5942cdf0d5d6cc7d902791 --- /dev/null +++ b/checkpoint-950/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02949f3504ab3cd9d1ea4c0574cf35228320c0c96ccbfc979491373a9c57d3c1 +size 84046925 diff --git a/checkpoint-950/optimizer.pt b/checkpoint-950/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..487c35f147638cc407779d30bd955ca839f38b2b --- /dev/null +++ b/checkpoint-950/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92aee4f6af905b194176f069270cee6bda1658207d02008dba479ca953cd82ba +size 168039557 diff --git a/checkpoint-950/rng_state.pth b/checkpoint-950/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..611e430b62eb36a88c9fa73ac90e15ef08092c84 --- /dev/null +++ b/checkpoint-950/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed25a447c6495e6d586bc9298d04c6fe7ccada8fc8a27134aa2767255fa90a92 +size 14575 diff --git a/checkpoint-950/scheduler.pt b/checkpoint-950/scheduler.pt new file mode 100644 index 
0000000000000000000000000000000000000000..bad09017dff3318e7f37457f7d2885617eec48ee --- /dev/null +++ b/checkpoint-950/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f21d8dd6eb1f167e3a1ca07a9980ed88c4590fe6d2f957237f091df3c596a7b +size 627 diff --git a/checkpoint-950/trainer_state.json b/checkpoint-950/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..540d0c16d6d97d22df183bb8c3cffdf0889315b0 --- /dev/null +++ b/checkpoint-950/trainer_state.json @@ -0,0 +1,5871 @@ +{ + "best_metric": 1.6023043394088745, + "best_model_checkpoint": "./lora-out/checkpoint-300", + "epoch": 2.9548989113530326, + "eval_steps": 50, + "global_step": 950, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "learning_rate": 2e-05, + "loss": 1.7924, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 4e-05, + "loss": 1.8083, + "step": 2 + }, + { + "epoch": 0.01, + "learning_rate": 6e-05, + "loss": 1.8177, + "step": 3 + }, + { + "epoch": 0.01, + "learning_rate": 8e-05, + "loss": 1.7595, + "step": 4 + }, + { + "epoch": 0.02, + "learning_rate": 0.0001, + "loss": 1.6598, + "step": 5 + }, + { + "epoch": 0.02, + "learning_rate": 0.00012, + "loss": 1.6919, + "step": 6 + }, + { + "epoch": 0.02, + "learning_rate": 0.00014, + "loss": 1.6706, + "step": 7 + }, + { + "epoch": 0.02, + "learning_rate": 0.00016, + "loss": 1.6879, + "step": 8 + }, + { + "epoch": 0.03, + "learning_rate": 0.00018, + "loss": 1.7051, + "step": 9 + }, + { + "epoch": 0.03, + "learning_rate": 0.0002, + "loss": 1.7022, + "step": 10 + }, + { + "epoch": 0.03, + "learning_rate": 0.000199999456645141, + "loss": 1.6809, + "step": 11 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999782658646859, + "loss": 1.6098, + "step": 12 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001999951098416968, + "loss": 1.7014, + "step": 13 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019999130644034888, + "loss": 1.5885, + "step": 14 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998641642375657, + "loss": 1.6243, + "step": 15 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019998043984506027, + "loss": 1.6484, + "step": 16 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019997337676920803, + "loss": 1.6093, + "step": 17 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019996522727295496, + "loss": 1.6173, + "step": 18 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019995599144486247, + "loss": 1.646, + "step": 19 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019994566938529712, + "loss": 1.6469, + "step": 20 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019993426120642983, + "loss": 1.6564, + "step": 21 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019992176703223432, + "loss": 1.5901, + "step": 22 + }, + { + "epoch": 0.07, + "learning_rate": 0.000199908186998486, + "loss": 1.664, + "step": 23 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019989352125276047, + "loss": 1.6275, + "step": 24 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019987776995443178, + "loss": 1.5839, + "step": 25 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019986093327467076, + "loss": 1.5611, + "step": 26 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019984301139644334, + "loss": 1.669, + "step": 27 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001998240045145083, + "loss": 1.5641, + "step": 28 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019980391283541522, + "loss": 1.6023, + "step": 29 + }, + 
{ + "epoch": 0.09, + "learning_rate": 0.00019978273657750238, + "loss": 1.6309, + "step": 30 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001997604759708942, + "loss": 1.6353, + "step": 31 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019973713125749884, + "loss": 1.6328, + "step": 32 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019971270269100564, + "loss": 1.5683, + "step": 33 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019968719053688213, + "loss": 1.6217, + "step": 34 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001996605950723714, + "loss": 1.5734, + "step": 35 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019963291658648896, + "loss": 1.6162, + "step": 36 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019960415538001957, + "loss": 1.5922, + "step": 37 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001995743117655141, + "loss": 1.5806, + "step": 38 + }, + { + "epoch": 0.12, + "learning_rate": 0.000199543386067286, + "loss": 1.5938, + "step": 39 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019951137862140778, + "loss": 1.6386, + "step": 40 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019947828977570756, + "loss": 1.6476, + "step": 41 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019944411988976496, + "loss": 1.6557, + "step": 42 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019940886933490749, + "loss": 1.5836, + "step": 43 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019937253849420635, + "loss": 1.6421, + "step": 44 + }, + { + "epoch": 0.14, + "learning_rate": 0.0001993351277624723, + "loss": 1.629, + "step": 45 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019929663754625145, + "loss": 1.6392, + "step": 46 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019925706826382064, + "loss": 1.5677, + "step": 47 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019921642034518317, + "loss": 1.6144, + "step": 48 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019917469423206389, + "loss": 1.6068, + "step": 49 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019913189037790456, + "loss": 1.6421, + "step": 50 + }, + { + "epoch": 0.16, + "eval_loss": 1.621693730354309, + "eval_runtime": 233.7603, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 50 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990880092478588, + "loss": 1.6172, + "step": 51 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001990430513187871, + "loss": 1.6095, + "step": 52 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019899701707925166, + "loss": 1.5967, + "step": 53 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019894990702951106, + "loss": 1.617, + "step": 54 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019890172168151473, + "loss": 1.5932, + "step": 55 + }, + { + "epoch": 0.17, + "learning_rate": 0.0001988524615588976, + "loss": 1.6548, + "step": 56 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019880212719697413, + "loss": 1.6033, + "step": 57 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019875071914273278, + "loss": 1.6063, + "step": 58 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019869823795482986, + "loss": 1.6107, + "step": 59 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019864468420358354, + "loss": 1.5758, + "step": 60 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019859005847096763, + "loss": 1.5723, + "step": 61 + }, + { + "epoch": 0.19, + "learning_rate": 0.00019853436135060527, + "loss": 1.542, + "step": 62 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019847759344776252, + "loss": 1.5611, + "step": 63 + }, + 
{ + "epoch": 0.2, + "learning_rate": 0.00019841975537934162, + "loss": 1.6157, + "step": 64 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019836084777387458, + "loss": 1.5589, + "step": 65 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019830087127151598, + "loss": 1.6077, + "step": 66 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019823982652403634, + "loss": 1.5473, + "step": 67 + }, + { + "epoch": 0.21, + "learning_rate": 0.00019817771419481487, + "loss": 1.6265, + "step": 68 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001981145349588323, + "loss": 1.6074, + "step": 69 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019805028950266348, + "loss": 1.6195, + "step": 70 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019798497852447006, + "loss": 1.5876, + "step": 71 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001979186027339928, + "loss": 1.5978, + "step": 72 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019785116285254381, + "loss": 1.533, + "step": 73 + }, + { + "epoch": 0.23, + "learning_rate": 0.00019778265961299888, + "loss": 1.5888, + "step": 74 + }, + { + "epoch": 0.23, + "learning_rate": 0.0001977130937597894, + "loss": 1.6211, + "step": 75 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019764246604889415, + "loss": 1.6091, + "step": 76 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019757077724783147, + "loss": 1.6012, + "step": 77 + }, + { + "epoch": 0.24, + "learning_rate": 0.0001974980281356504, + "loss": 1.6401, + "step": 78 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001974242195029227, + "loss": 1.6111, + "step": 79 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019734935215173392, + "loss": 1.6208, + "step": 80 + }, + { + "epoch": 0.25, + "learning_rate": 0.00019727342689567482, + "loss": 1.6038, + "step": 81 + }, + { + "epoch": 0.26, + "learning_rate": 0.00019719644455983256, + "loss": 1.5915, + "step": 82 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001971184059807817, + "loss": 1.5872, + "step": 83 + }, + { + "epoch": 0.26, + "learning_rate": 0.000197039312006575, + "loss": 1.5984, + "step": 84 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001969591634967344, + "loss": 1.5996, + "step": 85 + }, + { + "epoch": 0.27, + "learning_rate": 0.00019687796132224152, + "loss": 1.6056, + "step": 86 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967957063655283, + "loss": 1.6099, + "step": 87 + }, + { + "epoch": 0.27, + "learning_rate": 0.0001967123995204674, + "loss": 1.6295, + "step": 88 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019662804169236225, + "loss": 1.5482, + "step": 89 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019654263379793773, + "loss": 1.5781, + "step": 90 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019645617676532963, + "loss": 1.5954, + "step": 91 + }, + { + "epoch": 0.29, + "learning_rate": 0.000196368671534075, + "loss": 1.619, + "step": 92 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001962801190551016, + "loss": 1.6153, + "step": 93 + }, + { + "epoch": 0.29, + "learning_rate": 0.0001961905202907179, + "loss": 1.6008, + "step": 94 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019609987621460232, + "loss": 1.5891, + "step": 95 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001960081878117929, + "loss": 1.6438, + "step": 96 + }, + { + "epoch": 0.3, + "learning_rate": 0.0001959154560786764, + "loss": 1.5576, + "step": 97 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019582168202297758, + "loss": 1.646, + "step": 98 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019572686666374822, + "loss": 1.6269, 
+ "step": 99 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019563101103135602, + "loss": 1.6288, + "step": 100 + }, + { + "epoch": 0.31, + "eval_loss": 1.6143836975097656, + "eval_runtime": 233.6412, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 100 + }, + { + "epoch": 0.31, + "learning_rate": 0.00019553411616747348, + "loss": 1.5667, + "step": 101 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019543618312506647, + "loss": 1.6221, + "step": 102 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001953372129683829, + "loss": 1.5992, + "step": 103 + }, + { + "epoch": 0.32, + "learning_rate": 0.0001952372067729411, + "loss": 1.6138, + "step": 104 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019513616562551807, + "loss": 1.51, + "step": 105 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019503409062413782, + "loss": 1.6227, + "step": 106 + }, + { + "epoch": 0.33, + "learning_rate": 0.00019493098287805927, + "loss": 1.6014, + "step": 107 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019482684350776434, + "loss": 1.625, + "step": 108 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001947216736449457, + "loss": 1.6109, + "step": 109 + }, + { + "epoch": 0.34, + "learning_rate": 0.0001946154744324945, + "loss": 1.62, + "step": 110 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019450824702448778, + "loss": 1.5878, + "step": 111 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001943999925861763, + "loss": 1.6264, + "step": 112 + }, + { + "epoch": 0.35, + "learning_rate": 0.00019429071229397157, + "loss": 1.6186, + "step": 113 + }, + { + "epoch": 0.35, + "learning_rate": 0.0001941804073354331, + "loss": 1.6363, + "step": 114 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019406907890925562, + "loss": 1.5341, + "step": 115 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019395672822525593, + "loss": 1.5986, + "step": 116 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019384335650435985, + "loss": 1.6181, + "step": 117 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001937289649785889, + "loss": 1.6118, + "step": 118 + }, + { + "epoch": 0.37, + "learning_rate": 0.0001936135548910469, + "loss": 1.6404, + "step": 119 + }, + { + "epoch": 0.37, + "learning_rate": 0.00019349712749590649, + "loss": 1.583, + "step": 120 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019337968405839547, + "loss": 1.5827, + "step": 121 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019326122585478308, + "loss": 1.6392, + "step": 122 + }, + { + "epoch": 0.38, + "learning_rate": 0.00019314175417236616, + "loss": 1.5861, + "step": 123 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019302127030945508, + "loss": 1.5738, + "step": 124 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001928997755753597, + "loss": 1.5915, + "step": 125 + }, + { + "epoch": 0.39, + "learning_rate": 0.00019277727129037508, + "loss": 1.617, + "step": 126 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001926537587857672, + "loss": 1.5582, + "step": 127 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019252923940375844, + "loss": 1.6294, + "step": 128 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019240371449751306, + "loss": 1.6087, + "step": 129 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019227718543112236, + "loss": 1.5749, + "step": 130 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019214965357959005, + "loss": 1.6041, + "step": 131 + }, + { + "epoch": 0.41, + "learning_rate": 0.00019202112032881715, + "loss": 1.6106, + "step": 132 + }, + { + "epoch": 0.41, + "learning_rate": 
0.00019189158707558695, + "loss": 1.5553, + "step": 133 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019176105522754995, + "loss": 1.5638, + "step": 134 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001916295262032084, + "loss": 1.5921, + "step": 135 + }, + { + "epoch": 0.42, + "learning_rate": 0.00019149700143190096, + "loss": 1.5837, + "step": 136 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019136348235378726, + "loss": 1.6341, + "step": 137 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019122897041983205, + "loss": 1.5678, + "step": 138 + }, + { + "epoch": 0.43, + "learning_rate": 0.00019109346709178963, + "loss": 1.6137, + "step": 139 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001909569738421878, + "loss": 1.6324, + "step": 140 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019081949215431194, + "loss": 1.612, + "step": 141 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019068102352218897, + "loss": 1.5908, + "step": 142 + }, + { + "epoch": 0.44, + "learning_rate": 0.00019054156945057097, + "loss": 1.6087, + "step": 143 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019040113145491887, + "loss": 1.5613, + "step": 144 + }, + { + "epoch": 0.45, + "learning_rate": 0.000190259711061386, + "loss": 1.6072, + "step": 145 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019011730980680156, + "loss": 1.5722, + "step": 146 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001899739292386538, + "loss": 1.5961, + "step": 147 + }, + { + "epoch": 0.46, + "learning_rate": 0.00018982957091507325, + "loss": 1.5409, + "step": 148 + }, + { + "epoch": 0.46, + "learning_rate": 0.0001896842364048159, + "loss": 1.6557, + "step": 149 + }, + { + "epoch": 0.47, + "learning_rate": 0.000189537927287246, + "loss": 1.5725, + "step": 150 + }, + { + "epoch": 0.47, + "eval_loss": 1.6101970672607422, + "eval_runtime": 233.5313, + "eval_samples_per_second": 16.37, + "eval_steps_per_second": 4.094, + "step": 150 + }, + { + "epoch": 0.47, + "learning_rate": 0.00018939064515231888, + "loss": 1.5949, + "step": 151 + }, + { + "epoch": 0.47, + "learning_rate": 0.0001892423916005639, + "loss": 1.6191, + "step": 152 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018909316824306674, + "loss": 1.5487, + "step": 153 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018894297670145216, + "loss": 1.5104, + "step": 154 + }, + { + "epoch": 0.48, + "learning_rate": 0.00018879181860786623, + "loss": 1.6392, + "step": 155 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018863969560495866, + "loss": 1.5932, + "step": 156 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018848660934586491, + "loss": 1.6213, + "step": 157 + }, + { + "epoch": 0.49, + "learning_rate": 0.0001883325614941882, + "loss": 1.5515, + "step": 158 + }, + { + "epoch": 0.49, + "learning_rate": 0.00018817755372398155, + "loss": 1.6166, + "step": 159 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018802158771972943, + "loss": 1.6552, + "step": 160 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018786466517632956, + "loss": 1.6378, + "step": 161 + }, + { + "epoch": 0.5, + "learning_rate": 0.00018770678779907448, + "loss": 1.5176, + "step": 162 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018754795730363302, + "loss": 1.5793, + "step": 163 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018738817541603156, + "loss": 1.6616, + "step": 164 + }, + { + "epoch": 0.51, + "learning_rate": 0.00018722744387263544, + "loss": 1.6055, + "step": 165 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018706576442012994, + "loss": 1.6204, + "step": 166 + }, + 
{ + "epoch": 0.52, + "learning_rate": 0.00018690313881550137, + "loss": 1.5952, + "step": 167 + }, + { + "epoch": 0.52, + "learning_rate": 0.00018673956882601803, + "loss": 1.6271, + "step": 168 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018657505622921082, + "loss": 1.538, + "step": 169 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018640960281285417, + "loss": 1.5874, + "step": 170 + }, + { + "epoch": 0.53, + "learning_rate": 0.0001862432103749464, + "loss": 1.5694, + "step": 171 + }, + { + "epoch": 0.53, + "learning_rate": 0.00018607588072369033, + "loss": 1.583, + "step": 172 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018590761567747354, + "loss": 1.5961, + "step": 173 + }, + { + "epoch": 0.54, + "learning_rate": 0.00018573841706484866, + "loss": 1.582, + "step": 174 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001855682867245134, + "loss": 1.6427, + "step": 175 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018539722650529075, + "loss": 1.604, + "step": 176 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018522523826610868, + "loss": 1.577, + "step": 177 + }, + { + "epoch": 0.55, + "learning_rate": 0.00018505232387598018, + "loss": 1.6339, + "step": 178 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018487848521398265, + "loss": 1.5993, + "step": 179 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001847037241692378, + "loss": 1.6286, + "step": 180 + }, + { + "epoch": 0.56, + "learning_rate": 0.00018452804264089084, + "loss": 1.5963, + "step": 181 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018435144253809, + "loss": 1.5856, + "step": 182 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018417392577996578, + "loss": 1.5787, + "step": 183 + }, + { + "epoch": 0.57, + "learning_rate": 0.00018399549429561006, + "loss": 1.5876, + "step": 184 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018381615002405509, + "loss": 1.5565, + "step": 185 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018363589491425248, + "loss": 1.5897, + "step": 186 + }, + { + "epoch": 0.58, + "learning_rate": 0.0001834547309250521, + "loss": 1.5951, + "step": 187 + }, + { + "epoch": 0.58, + "learning_rate": 0.00018327266002518056, + "loss": 1.5447, + "step": 188 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018308968419322003, + "loss": 1.6087, + "step": 189 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018290580541758668, + "loss": 1.5946, + "step": 190 + }, + { + "epoch": 0.59, + "learning_rate": 0.00018272102569650905, + "loss": 1.6148, + "step": 191 + }, + { + "epoch": 0.6, + "learning_rate": 0.00018253534703800627, + "loss": 1.649, + "step": 192 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001823487714598664, + "loss": 1.6312, + "step": 193 + }, + { + "epoch": 0.6, + "learning_rate": 0.0001821613009896244, + "loss": 1.5858, + "step": 194 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018197293766454003, + "loss": 1.5925, + "step": 195 + }, + { + "epoch": 0.61, + "learning_rate": 0.0001817836835315759, + "loss": 1.5604, + "step": 196 + }, + { + "epoch": 0.61, + "learning_rate": 0.00018159354064737506, + "loss": 1.6125, + "step": 197 + }, + { + "epoch": 0.62, + "learning_rate": 0.0001814025110782387, + "loss": 1.5954, + "step": 198 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018121059690010368, + "loss": 1.5937, + "step": 199 + }, + { + "epoch": 0.62, + "learning_rate": 0.00018101780019852008, + "loss": 1.5582, + "step": 200 + }, + { + "epoch": 0.62, + "eval_loss": 1.6065257787704468, + "eval_runtime": 233.7919, + "eval_samples_per_second": 16.352, + 
"eval_steps_per_second": 4.089, + "step": 200 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018082412306862837, + "loss": 1.5628, + "step": 201 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018062956761513675, + "loss": 1.5735, + "step": 202 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018043413595229818, + "loss": 1.6011, + "step": 203 + }, + { + "epoch": 0.63, + "learning_rate": 0.00018023783020388763, + "loss": 1.5434, + "step": 204 + }, + { + "epoch": 0.64, + "learning_rate": 0.00018004065250317868, + "loss": 1.5533, + "step": 205 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017984260499292058, + "loss": 1.6074, + "step": 206 + }, + { + "epoch": 0.64, + "learning_rate": 0.00017964368982531487, + "loss": 1.5286, + "step": 207 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017944390916199203, + "loss": 1.5161, + "step": 208 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017924326517398793, + "loss": 1.6024, + "step": 209 + }, + { + "epoch": 0.65, + "learning_rate": 0.00017904176004172027, + "loss": 1.5727, + "step": 210 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001788393959549649, + "loss": 1.5752, + "step": 211 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017863617511283203, + "loss": 1.5845, + "step": 212 + }, + { + "epoch": 0.66, + "learning_rate": 0.00017843209972374233, + "loss": 1.6082, + "step": 213 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017822717200540283, + "loss": 1.5895, + "step": 214 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017802139418478298, + "loss": 1.5836, + "step": 215 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017781476849809038, + "loss": 1.5996, + "step": 216 + }, + { + "epoch": 0.67, + "learning_rate": 0.00017760729719074644, + "loss": 1.6256, + "step": 217 + }, + { + "epoch": 0.68, + "learning_rate": 0.000177398982517362, + "loss": 1.628, + "step": 218 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017718982674171284, + "loss": 1.5543, + "step": 219 + }, + { + "epoch": 0.68, + "learning_rate": 0.00017697983213671515, + "loss": 1.5732, + "step": 220 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001767690009844007, + "loss": 1.5892, + "step": 221 + }, + { + "epoch": 0.69, + "learning_rate": 0.0001765573355758921, + "loss": 1.6524, + "step": 222 + }, + { + "epoch": 0.69, + "learning_rate": 0.00017634483821137787, + "loss": 1.5694, + "step": 223 + }, + { + "epoch": 0.7, + "learning_rate": 0.0001761315112000876, + "loss": 1.6006, + "step": 224 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017591735686026661, + "loss": 1.6161, + "step": 225 + }, + { + "epoch": 0.7, + "learning_rate": 0.00017570237751915092, + "loss": 1.595, + "step": 226 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017548657551294192, + "loss": 1.6072, + "step": 227 + }, + { + "epoch": 0.71, + "learning_rate": 0.000175269953186781, + "loss": 1.5855, + "step": 228 + }, + { + "epoch": 0.71, + "learning_rate": 0.00017505251289472406, + "loss": 1.597, + "step": 229 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001748342569997158, + "loss": 1.5837, + "step": 230 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017461518787356432, + "loss": 1.5422, + "step": 231 + }, + { + "epoch": 0.72, + "learning_rate": 0.00017439530789691506, + "loss": 1.5837, + "step": 232 + }, + { + "epoch": 0.72, + "learning_rate": 0.0001741746194592251, + "loss": 1.6038, + "step": 233 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017395312495873717, + "loss": 1.5882, + "step": 234 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017373082680245347, + "loss": 1.5763, + 
"step": 235 + }, + { + "epoch": 0.73, + "learning_rate": 0.00017350772740610976, + "loss": 1.6046, + "step": 236 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017328382919414877, + "loss": 1.594, + "step": 237 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017305913459969414, + "loss": 1.5903, + "step": 238 + }, + { + "epoch": 0.74, + "learning_rate": 0.00017283364606452396, + "loss": 1.5704, + "step": 239 + }, + { + "epoch": 0.75, + "learning_rate": 0.0001726073660390439, + "loss": 1.588, + "step": 240 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017238029698226113, + "loss": 1.6273, + "step": 241 + }, + { + "epoch": 0.75, + "learning_rate": 0.00017215244136175705, + "loss": 1.5166, + "step": 242 + }, + { + "epoch": 0.76, + "learning_rate": 0.00017192380165366092, + "loss": 1.5813, + "step": 243 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001716943803426226, + "loss": 1.5654, + "step": 244 + }, + { + "epoch": 0.76, + "learning_rate": 0.0001714641799217858, + "loss": 1.5548, + "step": 245 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017123320289276085, + "loss": 1.5491, + "step": 246 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001710014517655976, + "loss": 1.5903, + "step": 247 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017076892905875806, + "loss": 1.5687, + "step": 248 + }, + { + "epoch": 0.77, + "learning_rate": 0.00017053563729908905, + "loss": 1.5975, + "step": 249 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017030157902179485, + "loss": 1.6055, + "step": 250 + }, + { + "epoch": 0.78, + "eval_loss": 1.60513174533844, + "eval_runtime": 233.7813, + "eval_samples_per_second": 16.353, + "eval_steps_per_second": 4.089, + "step": 250 + }, + { + "epoch": 0.78, + "learning_rate": 0.00017006675677040946, + "loss": 1.4661, + "step": 251 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016983117309676908, + "loss": 1.6071, + "step": 252 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016959483056098445, + "loss": 1.5664, + "step": 253 + }, + { + "epoch": 0.79, + "learning_rate": 0.0001693577317314129, + "loss": 1.5189, + "step": 254 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016911987918463034, + "loss": 1.5488, + "step": 255 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001688812755054036, + "loss": 1.6153, + "step": 256 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016864192328666202, + "loss": 1.536, + "step": 257 + }, + { + "epoch": 0.8, + "learning_rate": 0.00016840182512946943, + "loss": 1.624, + "step": 258 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016816098364299582, + "loss": 1.569, + "step": 259 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016791940144448902, + "loss": 1.588, + "step": 260 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001676770811592463, + "loss": 1.5626, + "step": 261 + }, + { + "epoch": 0.81, + "learning_rate": 0.00016743402542058572, + "loss": 1.5836, + "step": 262 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016719023686981763, + "loss": 1.5573, + "step": 263 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016694571815621586, + "loss": 1.5815, + "step": 264 + }, + { + "epoch": 0.82, + "learning_rate": 0.00016670047193698912, + "loss": 1.64, + "step": 265 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001664545008772518, + "loss": 1.6395, + "step": 266 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016620780764999536, + "loss": 1.5927, + "step": 267 + }, + { + "epoch": 0.83, + "learning_rate": 0.00016596039493605913, + "loss": 1.605, + "step": 268 + }, + { + "epoch": 0.84, + "learning_rate": 
0.000165712265424101, + "loss": 1.6219, + "step": 269 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001654634218105686, + "loss": 1.5458, + "step": 270 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001652138667996696, + "loss": 1.59, + "step": 271 + }, + { + "epoch": 0.85, + "learning_rate": 0.00016496360310334253, + "loss": 1.633, + "step": 272 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001647126334412274, + "loss": 1.6108, + "step": 273 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001644609605406358, + "loss": 1.5747, + "step": 274 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001642085871365217, + "loss": 1.5393, + "step": 275 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016395551597145133, + "loss": 1.5768, + "step": 276 + }, + { + "epoch": 0.86, + "learning_rate": 0.00016370174979557368, + "loss": 1.6278, + "step": 277 + }, + { + "epoch": 0.86, + "learning_rate": 0.0001634472913665904, + "loss": 1.5983, + "step": 278 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016319214344972602, + "loss": 1.5701, + "step": 279 + }, + { + "epoch": 0.87, + "learning_rate": 0.00016293630881769773, + "loss": 1.5874, + "step": 280 + }, + { + "epoch": 0.87, + "learning_rate": 0.0001626797902506853, + "loss": 1.5412, + "step": 281 + }, + { + "epoch": 0.88, + "learning_rate": 0.000162422590536301, + "loss": 1.5733, + "step": 282 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016216471246955906, + "loss": 1.6245, + "step": 283 + }, + { + "epoch": 0.88, + "learning_rate": 0.00016190615885284553, + "loss": 1.5743, + "step": 284 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016164693249588768, + "loss": 1.5793, + "step": 285 + }, + { + "epoch": 0.89, + "learning_rate": 0.00016138703621572346, + "loss": 1.5672, + "step": 286 + }, + { + "epoch": 0.89, + "learning_rate": 0.0001611264728366711, + "loss": 1.5442, + "step": 287 + }, + { + "epoch": 0.9, + "learning_rate": 0.0001608652451902981, + "loss": 1.5765, + "step": 288 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016060335611539072, + "loss": 1.6058, + "step": 289 + }, + { + "epoch": 0.9, + "learning_rate": 0.00016034080845792295, + "loss": 1.6156, + "step": 290 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001600776050710257, + "loss": 1.6179, + "step": 291 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001598137488149558, + "loss": 1.5747, + "step": 292 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015954924255706478, + "loss": 1.5772, + "step": 293 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015928408917176786, + "loss": 1.6064, + "step": 294 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015901829154051265, + "loss": 1.6082, + "step": 295 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015875185255174787, + "loss": 1.5768, + "step": 296 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001584847751008918, + "loss": 1.5466, + "step": 297 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015821706209030118, + "loss": 1.5127, + "step": 298 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015794871642923927, + "loss": 1.5745, + "step": 299 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015767974103384443, + "loss": 1.5733, + "step": 300 + }, + { + "epoch": 0.93, + "eval_loss": 1.6023043394088745, + "eval_runtime": 233.7298, + "eval_samples_per_second": 16.356, + "eval_steps_per_second": 4.09, + "step": 300 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001574101388270984, + "loss": 1.6189, + "step": 301 + }, + { + "epoch": 0.94, + "learning_rate": 0.0001571399127387946, + "loss": 1.54, + "step": 302 + }, + { + 
"epoch": 0.94, + "learning_rate": 0.00015686906570550616, + "loss": 1.5419, + "step": 303 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015659760067055417, + "loss": 1.576, + "step": 304 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015632552058397544, + "loss": 1.6072, + "step": 305 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015605282840249087, + "loss": 1.5429, + "step": 306 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015577952708947272, + "loss": 1.5149, + "step": 307 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015550561961491304, + "loss": 1.5744, + "step": 308 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015523110895539097, + "loss": 1.6155, + "step": 309 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015495599809404044, + "loss": 1.541, + "step": 310 + }, + { + "epoch": 0.97, + "learning_rate": 0.000154680290020518, + "loss": 1.5227, + "step": 311 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015440398773097002, + "loss": 1.5462, + "step": 312 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015412709422800037, + "loss": 1.56, + "step": 313 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015384961252063763, + "loss": 1.6597, + "step": 314 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015357154562430252, + "loss": 1.5917, + "step": 315 + }, + { + "epoch": 0.98, + "learning_rate": 0.000153292896560775, + "loss": 1.6058, + "step": 316 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001530136683581615, + "loss": 1.581, + "step": 317 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015273386405086209, + "loss": 1.592, + "step": 318 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015245348667953726, + "loss": 1.5711, + "step": 319 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001521725392910753, + "loss": 1.5829, + "step": 320 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015189102493855868, + "loss": 1.5786, + "step": 321 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015160894668123123, + "loss": 1.5848, + "step": 322 + }, + { + "epoch": 1.0, + "learning_rate": 0.0001513263075844648, + "loss": 1.482, + "step": 323 + }, + { + "epoch": 1.01, + "learning_rate": 0.000151043110719726, + "loss": 1.495, + "step": 324 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015075935916454255, + "loss": 1.4535, + "step": 325 + }, + { + "epoch": 1.01, + "learning_rate": 0.00015047505600247028, + "loss": 1.5398, + "step": 326 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001501902043230592, + "loss": 1.4649, + "step": 327 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014990480722182022, + "loss": 1.512, + "step": 328 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001496188678001914, + "loss": 1.4365, + "step": 329 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014933238916550425, + "loss": 1.5408, + "step": 330 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014904537443094986, + "loss": 1.4992, + "step": 331 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014875782671554526, + "loss": 1.5125, + "step": 332 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014846974914409943, + "loss": 1.4823, + "step": 333 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014818114484717933, + "loss": 1.4985, + "step": 334 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014789201696107594, + "loss": 1.457, + "step": 335 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014760236862777, + "loss": 1.4623, + "step": 336 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001473122029948982, + "loss": 1.466, + "step": 337 + }, + { + "epoch": 1.05, + "learning_rate": 
0.0001470215232157186, + "loss": 1.4982, + "step": 338 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014673033244907665, + "loss": 1.4369, + "step": 339 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014643863385937076, + "loss": 1.4698, + "step": 340 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014614643061651772, + "loss": 1.4462, + "step": 341 + }, + { + "epoch": 1.06, + "learning_rate": 0.0001458537258959186, + "loss": 1.4513, + "step": 342 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014556052287842413, + "loss": 1.4304, + "step": 343 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014526682475029994, + "loss": 1.4953, + "step": 344 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014497263470319215, + "loss": 1.4209, + "step": 345 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014467795593409256, + "loss": 1.4522, + "step": 346 + }, + { + "epoch": 1.08, + "learning_rate": 0.000144382791645304, + "loss": 1.495, + "step": 347 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001440871450444055, + "loss": 1.4461, + "step": 348 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014379101934421736, + "loss": 1.4592, + "step": 349 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001434944177627664, + "loss": 1.4885, + "step": 350 + }, + { + "epoch": 1.09, + "eval_loss": 1.6130114793777466, + "eval_runtime": 233.7594, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 350 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014319734352325077, + "loss": 1.5119, + "step": 351 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014289979985400515, + "loss": 1.4618, + "step": 352 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014260178998846547, + "loss": 1.499, + "step": 353 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014230331716513396, + "loss": 1.4611, + "step": 354 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014200438462754373, + "loss": 1.4503, + "step": 355 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014170499562422376, + "loss": 1.472, + "step": 356 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014140515340866337, + "loss": 1.4654, + "step": 357 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014110486123927718, + "loss": 1.4245, + "step": 358 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001408041223793693, + "loss": 1.4944, + "step": 359 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014050294009709813, + "loss": 1.481, + "step": 360 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014020131766544084, + "loss": 1.4592, + "step": 361 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001398992583621577, + "loss": 1.5189, + "step": 362 + }, + { + "epoch": 1.13, + "learning_rate": 0.0001395967654697565, + "loss": 1.4575, + "step": 363 + }, + { + "epoch": 1.13, + "learning_rate": 0.00013929384227545692, + "loss": 1.5033, + "step": 364 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001389904920711547, + "loss": 1.5161, + "step": 365 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013868671815338605, + "loss": 1.4703, + "step": 366 + }, + { + "epoch": 1.14, + "learning_rate": 0.0001383825238232916, + "loss": 1.4617, + "step": 367 + }, + { + "epoch": 1.14, + "learning_rate": 0.00013807791238658077, + "loss": 1.4599, + "step": 368 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013777288715349559, + "loss": 1.4871, + "step": 369 + }, + { + "epoch": 1.15, + "learning_rate": 0.0001374674514387749, + "loss": 1.4825, + "step": 370 + }, + { + "epoch": 1.15, + "learning_rate": 0.00013716160856161834, + "loss": 1.5001, + "step": 371 + }, + { + 
"epoch": 1.16, + "learning_rate": 0.00013685536184565017, + "loss": 1.3828, + "step": 372 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013654871461888317, + "loss": 1.4882, + "step": 373 + }, + { + "epoch": 1.16, + "learning_rate": 0.00013624167021368257, + "loss": 1.4426, + "step": 374 + }, + { + "epoch": 1.17, + "learning_rate": 0.0001359342319667298, + "loss": 1.4827, + "step": 375 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013562640321898613, + "loss": 1.4811, + "step": 376 + }, + { + "epoch": 1.17, + "learning_rate": 0.00013531818731565647, + "loss": 1.4937, + "step": 377 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013500958760615306, + "loss": 1.4668, + "step": 378 + }, + { + "epoch": 1.18, + "learning_rate": 0.00013470060744405883, + "loss": 1.4579, + "step": 379 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001343912501870913, + "loss": 1.4692, + "step": 380 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013408151919706583, + "loss": 1.4927, + "step": 381 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013377141783985918, + "loss": 1.5073, + "step": 382 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013346094948537296, + "loss": 1.4771, + "step": 383 + }, + { + "epoch": 1.19, + "learning_rate": 0.00013315011750749688, + "loss": 1.5233, + "step": 384 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013283892528407235, + "loss": 1.4379, + "step": 385 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013252737619685542, + "loss": 1.493, + "step": 386 + }, + { + "epoch": 1.2, + "learning_rate": 0.00013221547363148034, + "loss": 1.4174, + "step": 387 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013190322097742259, + "loss": 1.4108, + "step": 388 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013159062162796208, + "loss": 1.4713, + "step": 389 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013127767898014637, + "loss": 1.4511, + "step": 390 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001309643964347536, + "loss": 1.4752, + "step": 391 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013065077739625566, + "loss": 1.4798, + "step": 392 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013033682527278107, + "loss": 1.4372, + "step": 393 + }, + { + "epoch": 1.23, + "learning_rate": 0.0001300225434760781, + "loss": 1.4556, + "step": 394 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012970793542147756, + "loss": 1.5026, + "step": 395 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012939300452785574, + "loss": 1.4878, + "step": 396 + }, + { + "epoch": 1.23, + "learning_rate": 0.00012907775421759732, + "loss": 1.479, + "step": 397 + }, + { + "epoch": 1.24, + "learning_rate": 0.000128762187916558, + "loss": 1.4508, + "step": 398 + }, + { + "epoch": 1.24, + "learning_rate": 0.0001284463090540275, + "loss": 1.4923, + "step": 399 + }, + { + "epoch": 1.24, + "learning_rate": 0.00012813012106269208, + "loss": 1.484, + "step": 400 + }, + { + "epoch": 1.24, + "eval_loss": 1.616938829421997, + "eval_runtime": 233.7894, + "eval_samples_per_second": 16.352, + "eval_steps_per_second": 4.089, + "step": 400 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012781362737859735, + "loss": 1.4867, + "step": 401 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012749683144111095, + "loss": 1.4923, + "step": 402 + }, + { + "epoch": 1.25, + "learning_rate": 0.00012717973669288513, + "loss": 1.4858, + "step": 403 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012686234657981933, + "loss": 1.4464, + "step": 404 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012654466455102272, + 
"loss": 1.4598, + "step": 405 + }, + { + "epoch": 1.26, + "learning_rate": 0.00012622669405877685, + "loss": 1.4237, + "step": 406 + }, + { + "epoch": 1.27, + "learning_rate": 0.0001259084385584979, + "loss": 1.475, + "step": 407 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012558990150869935, + "loss": 1.5201, + "step": 408 + }, + { + "epoch": 1.27, + "learning_rate": 0.00012527108637095427, + "loss": 1.4735, + "step": 409 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012495199660985767, + "loss": 1.4676, + "step": 410 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012463263569298914, + "loss": 1.4671, + "step": 411 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012431300709087468, + "loss": 1.4724, + "step": 412 + }, + { + "epoch": 1.28, + "learning_rate": 0.00012399311427694945, + "loss": 1.5451, + "step": 413 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001236729607275197, + "loss": 1.492, + "step": 414 + }, + { + "epoch": 1.29, + "learning_rate": 0.00012335254992172512, + "loss": 1.5186, + "step": 415 + }, + { + "epoch": 1.29, + "learning_rate": 0.0001230318853415012, + "loss": 1.4622, + "step": 416 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012271097047154096, + "loss": 1.4937, + "step": 417 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012238980879925756, + "loss": 1.4575, + "step": 418 + }, + { + "epoch": 1.3, + "learning_rate": 0.00012206840381474608, + "loss": 1.4801, + "step": 419 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012174675901074577, + "loss": 1.4523, + "step": 420 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012142487788260191, + "loss": 1.4957, + "step": 421 + }, + { + "epoch": 1.31, + "learning_rate": 0.00012110276392822799, + "loss": 1.4757, + "step": 422 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001207804206480677, + "loss": 1.4769, + "step": 423 + }, + { + "epoch": 1.32, + "learning_rate": 0.00012045785154505676, + "loss": 1.4435, + "step": 424 + }, + { + "epoch": 1.32, + "learning_rate": 0.000120135060124585, + "loss": 1.5211, + "step": 425 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011981204989445811, + "loss": 1.4248, + "step": 426 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011948882436485969, + "loss": 1.4883, + "step": 427 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011916538704831293, + "loss": 1.4919, + "step": 428 + }, + { + "epoch": 1.33, + "learning_rate": 0.00011884174145964262, + "loss": 1.4689, + "step": 429 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011851789111593688, + "loss": 1.4071, + "step": 430 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011819383953650874, + "loss": 1.4418, + "step": 431 + }, + { + "epoch": 1.34, + "learning_rate": 0.00011786959024285826, + "loss": 1.5206, + "step": 432 + }, + { + "epoch": 1.35, + "learning_rate": 0.00011754514675863408, + "loss": 1.446, + "step": 433 + }, + { + "epoch": 1.35, + "learning_rate": 0.000117220512609595, + "loss": 1.5165, + "step": 434 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001168956913235719, + "loss": 1.4119, + "step": 435 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011657068643042924, + "loss": 1.503, + "step": 436 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011624550146202682, + "loss": 1.4573, + "step": 437 + }, + { + "epoch": 1.36, + "learning_rate": 0.00011592013995218123, + "loss": 1.4707, + "step": 438 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011559460543662768, + "loss": 1.4304, + "step": 439 + }, + { + "epoch": 1.37, + "learning_rate": 0.00011526890145298137, + "loss": 1.4465, + "step": 440 + }, 
+ { + "epoch": 1.37, + "learning_rate": 0.0001149430315406991, + "loss": 1.4912, + "step": 441 + }, + { + "epoch": 1.37, + "learning_rate": 0.0001146169992410409, + "loss": 1.4549, + "step": 442 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011429080809703145, + "loss": 1.4528, + "step": 443 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011396446165342165, + "loss": 1.4148, + "step": 444 + }, + { + "epoch": 1.38, + "learning_rate": 0.00011363796345665001, + "loss": 1.467, + "step": 445 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001133113170548041, + "loss": 1.492, + "step": 446 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011298452599758217, + "loss": 1.5244, + "step": 447 + }, + { + "epoch": 1.39, + "learning_rate": 0.00011265759383625436, + "loss": 1.4553, + "step": 448 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001123305241236243, + "loss": 1.4764, + "step": 449 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011200332041399027, + "loss": 1.4354, + "step": 450 + }, + { + "epoch": 1.4, + "eval_loss": 1.6193681955337524, + "eval_runtime": 233.6751, + "eval_samples_per_second": 16.36, + "eval_steps_per_second": 4.091, + "step": 450 + }, + { + "epoch": 1.4, + "learning_rate": 0.00011167598626310682, + "loss": 1.4946, + "step": 451 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011134852522814596, + "loss": 1.4558, + "step": 452 + }, + { + "epoch": 1.41, + "learning_rate": 0.0001110209408676586, + "loss": 1.4549, + "step": 453 + }, + { + "epoch": 1.41, + "learning_rate": 0.00011069323674153585, + "loss": 1.4992, + "step": 454 + }, + { + "epoch": 1.42, + "learning_rate": 0.0001103654164109702, + "loss": 1.4828, + "step": 455 + }, + { + "epoch": 1.42, + "learning_rate": 0.00011003748343841711, + "loss": 1.4939, + "step": 456 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010970944138755604, + "loss": 1.4761, + "step": 457 + }, + { + "epoch": 1.42, + "learning_rate": 0.00010938129382325184, + "loss": 1.4394, + "step": 458 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010905304431151602, + "loss": 1.4852, + "step": 459 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010872469641946783, + "loss": 1.4479, + "step": 460 + }, + { + "epoch": 1.43, + "learning_rate": 0.00010839625371529583, + "loss": 1.5161, + "step": 461 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010806771976821872, + "loss": 1.5104, + "step": 462 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001077390981484469, + "loss": 1.5056, + "step": 463 + }, + { + "epoch": 1.44, + "learning_rate": 0.00010741039242714337, + "loss": 1.4919, + "step": 464 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010708160617638521, + "loss": 1.4605, + "step": 465 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010675274296912452, + "loss": 1.5191, + "step": 466 + }, + { + "epoch": 1.45, + "learning_rate": 0.00010642380637914975, + "loss": 1.4504, + "step": 467 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010609479998104684, + "loss": 1.4619, + "step": 468 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010576572735016016, + "loss": 1.4619, + "step": 469 + }, + { + "epoch": 1.46, + "learning_rate": 0.00010543659206255409, + "loss": 1.4962, + "step": 470 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010510739769497378, + "loss": 1.4901, + "step": 471 + }, + { + "epoch": 1.47, + "learning_rate": 0.0001047781478248063, + "loss": 1.4708, + "step": 472 + }, + { + "epoch": 1.47, + "learning_rate": 0.00010444884603004213, + "loss": 1.4756, + "step": 473 + }, + { + "epoch": 1.47, + "learning_rate": 
0.00010411949588923577, + "loss": 1.3948, + "step": 474 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010379010098146728, + "loss": 1.5183, + "step": 475 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010346066488630308, + "loss": 1.4252, + "step": 476 + }, + { + "epoch": 1.48, + "learning_rate": 0.00010313119118375727, + "loss": 1.4686, + "step": 477 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010280168345425256, + "loss": 1.5285, + "step": 478 + }, + { + "epoch": 1.49, + "learning_rate": 0.00010247214527858149, + "loss": 1.4649, + "step": 479 + }, + { + "epoch": 1.49, + "learning_rate": 0.0001021425802378674, + "loss": 1.4602, + "step": 480 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010181299191352566, + "loss": 1.5102, + "step": 481 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010148338388722465, + "loss": 1.4894, + "step": 482 + }, + { + "epoch": 1.5, + "learning_rate": 0.00010115375974084677, + "loss": 1.501, + "step": 483 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010082412305644964, + "loss": 1.481, + "step": 484 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010049447741622717, + "loss": 1.4927, + "step": 485 + }, + { + "epoch": 1.51, + "learning_rate": 0.00010016482640247058, + "loss": 1.512, + "step": 486 + }, + { + "epoch": 1.51, + "learning_rate": 9.983517359752945e-05, + "loss": 1.4622, + "step": 487 + }, + { + "epoch": 1.52, + "learning_rate": 9.950552258377284e-05, + "loss": 1.4956, + "step": 488 + }, + { + "epoch": 1.52, + "learning_rate": 9.917587694355037e-05, + "loss": 1.493, + "step": 489 + }, + { + "epoch": 1.52, + "learning_rate": 9.884624025915328e-05, + "loss": 1.4629, + "step": 490 + }, + { + "epoch": 1.53, + "learning_rate": 9.851661611277537e-05, + "loss": 1.4531, + "step": 491 + }, + { + "epoch": 1.53, + "learning_rate": 9.818700808647435e-05, + "loss": 1.4656, + "step": 492 + }, + { + "epoch": 1.53, + "learning_rate": 9.785741976213261e-05, + "loss": 1.4982, + "step": 493 + }, + { + "epoch": 1.54, + "learning_rate": 9.752785472141854e-05, + "loss": 1.5053, + "step": 494 + }, + { + "epoch": 1.54, + "learning_rate": 9.719831654574745e-05, + "loss": 1.4619, + "step": 495 + }, + { + "epoch": 1.54, + "learning_rate": 9.686880881624275e-05, + "loss": 1.486, + "step": 496 + }, + { + "epoch": 1.55, + "learning_rate": 9.653933511369696e-05, + "loss": 1.4788, + "step": 497 + }, + { + "epoch": 1.55, + "learning_rate": 9.620989901853275e-05, + "loss": 1.4663, + "step": 498 + }, + { + "epoch": 1.55, + "learning_rate": 9.588050411076424e-05, + "loss": 1.5138, + "step": 499 + }, + { + "epoch": 1.56, + "learning_rate": 9.555115396995788e-05, + "loss": 1.4427, + "step": 500 + }, + { + "epoch": 1.56, + "eval_loss": 1.6187018156051636, + "eval_runtime": 233.6591, + "eval_samples_per_second": 16.361, + "eval_steps_per_second": 4.091, + "step": 500 + }, + { + "epoch": 1.56, + "learning_rate": 9.522185217519371e-05, + "loss": 1.4696, + "step": 501 + }, + { + "epoch": 1.56, + "learning_rate": 9.489260230502626e-05, + "loss": 1.4052, + "step": 502 + }, + { + "epoch": 1.56, + "learning_rate": 9.45634079374459e-05, + "loss": 1.4688, + "step": 503 + }, + { + "epoch": 1.57, + "learning_rate": 9.423427264983986e-05, + "loss": 1.4266, + "step": 504 + }, + { + "epoch": 1.57, + "learning_rate": 9.390520001895321e-05, + "loss": 1.4887, + "step": 505 + }, + { + "epoch": 1.57, + "learning_rate": 9.357619362085027e-05, + "loss": 1.4992, + "step": 506 + }, + { + "epoch": 1.58, + "learning_rate": 9.32472570308755e-05, + "loss": 1.4626, + "step": 507 + }, + { + "epoch": 
1.58, + "learning_rate": 9.291839382361481e-05, + "loss": 1.4984, + "step": 508 + }, + { + "epoch": 1.58, + "learning_rate": 9.258960757285664e-05, + "loss": 1.3692, + "step": 509 + }, + { + "epoch": 1.59, + "learning_rate": 9.226090185155314e-05, + "loss": 1.4325, + "step": 510 + }, + { + "epoch": 1.59, + "learning_rate": 9.19322802317813e-05, + "loss": 1.5049, + "step": 511 + }, + { + "epoch": 1.59, + "learning_rate": 9.160374628470421e-05, + "loss": 1.4589, + "step": 512 + }, + { + "epoch": 1.6, + "learning_rate": 9.127530358053218e-05, + "loss": 1.4291, + "step": 513 + }, + { + "epoch": 1.6, + "learning_rate": 9.094695568848402e-05, + "loss": 1.4474, + "step": 514 + }, + { + "epoch": 1.6, + "learning_rate": 9.061870617674817e-05, + "loss": 1.513, + "step": 515 + }, + { + "epoch": 1.6, + "learning_rate": 9.029055861244397e-05, + "loss": 1.4609, + "step": 516 + }, + { + "epoch": 1.61, + "learning_rate": 8.99625165615829e-05, + "loss": 1.5144, + "step": 517 + }, + { + "epoch": 1.61, + "learning_rate": 8.963458358902985e-05, + "loss": 1.4294, + "step": 518 + }, + { + "epoch": 1.61, + "learning_rate": 8.93067632584642e-05, + "loss": 1.4516, + "step": 519 + }, + { + "epoch": 1.62, + "learning_rate": 8.897905913234143e-05, + "loss": 1.4659, + "step": 520 + }, + { + "epoch": 1.62, + "learning_rate": 8.865147477185405e-05, + "loss": 1.4787, + "step": 521 + }, + { + "epoch": 1.62, + "learning_rate": 8.832401373689319e-05, + "loss": 1.4601, + "step": 522 + }, + { + "epoch": 1.63, + "learning_rate": 8.799667958600973e-05, + "loss": 1.4955, + "step": 523 + }, + { + "epoch": 1.63, + "learning_rate": 8.766947587637573e-05, + "loss": 1.4231, + "step": 524 + }, + { + "epoch": 1.63, + "learning_rate": 8.734240616374565e-05, + "loss": 1.4952, + "step": 525 + }, + { + "epoch": 1.64, + "learning_rate": 8.701547400241788e-05, + "loss": 1.4707, + "step": 526 + }, + { + "epoch": 1.64, + "learning_rate": 8.668868294519593e-05, + "loss": 1.5023, + "step": 527 + }, + { + "epoch": 1.64, + "learning_rate": 8.636203654335002e-05, + "loss": 1.4702, + "step": 528 + }, + { + "epoch": 1.65, + "learning_rate": 8.603553834657836e-05, + "loss": 1.4399, + "step": 529 + }, + { + "epoch": 1.65, + "learning_rate": 8.570919190296855e-05, + "loss": 1.5175, + "step": 530 + }, + { + "epoch": 1.65, + "learning_rate": 8.53830007589591e-05, + "loss": 1.4715, + "step": 531 + }, + { + "epoch": 1.65, + "learning_rate": 8.505696845930096e-05, + "loss": 1.5292, + "step": 532 + }, + { + "epoch": 1.66, + "learning_rate": 8.473109854701869e-05, + "loss": 1.5287, + "step": 533 + }, + { + "epoch": 1.66, + "learning_rate": 8.440539456337235e-05, + "loss": 1.4762, + "step": 534 + }, + { + "epoch": 1.66, + "learning_rate": 8.407986004781879e-05, + "loss": 1.4536, + "step": 535 + }, + { + "epoch": 1.67, + "learning_rate": 8.375449853797322e-05, + "loss": 1.5018, + "step": 536 + }, + { + "epoch": 1.67, + "learning_rate": 8.342931356957076e-05, + "loss": 1.4723, + "step": 537 + }, + { + "epoch": 1.67, + "learning_rate": 8.310430867642812e-05, + "loss": 1.4905, + "step": 538 + }, + { + "epoch": 1.68, + "learning_rate": 8.277948739040503e-05, + "loss": 1.4651, + "step": 539 + }, + { + "epoch": 1.68, + "learning_rate": 8.245485324136597e-05, + "loss": 1.4482, + "step": 540 + }, + { + "epoch": 1.68, + "learning_rate": 8.213040975714175e-05, + "loss": 1.3977, + "step": 541 + }, + { + "epoch": 1.69, + "learning_rate": 8.180616046349129e-05, + "loss": 1.5594, + "step": 542 + }, + { + "epoch": 1.69, + "learning_rate": 8.148210888406316e-05, + "loss": 
1.4995, + "step": 543 + }, + { + "epoch": 1.69, + "learning_rate": 8.115825854035737e-05, + "loss": 1.5106, + "step": 544 + }, + { + "epoch": 1.7, + "learning_rate": 8.083461295168707e-05, + "loss": 1.4219, + "step": 545 + }, + { + "epoch": 1.7, + "learning_rate": 8.051117563514036e-05, + "loss": 1.4766, + "step": 546 + }, + { + "epoch": 1.7, + "learning_rate": 8.018795010554193e-05, + "loss": 1.5241, + "step": 547 + }, + { + "epoch": 1.7, + "learning_rate": 7.986493987541502e-05, + "loss": 1.4673, + "step": 548 + }, + { + "epoch": 1.71, + "learning_rate": 7.954214845494325e-05, + "loss": 1.4236, + "step": 549 + }, + { + "epoch": 1.71, + "learning_rate": 7.921957935193232e-05, + "loss": 1.4687, + "step": 550 + }, + { + "epoch": 1.71, + "eval_loss": 1.617763876914978, + "eval_runtime": 233.6334, + "eval_samples_per_second": 16.363, + "eval_steps_per_second": 4.092, + "step": 550 + }, + { + "epoch": 1.71, + "learning_rate": 7.889723607177202e-05, + "loss": 1.4412, + "step": 551 + }, + { + "epoch": 1.72, + "learning_rate": 7.857512211739813e-05, + "loss": 1.4464, + "step": 552 + }, + { + "epoch": 1.72, + "learning_rate": 7.825324098925427e-05, + "loss": 1.4043, + "step": 553 + }, + { + "epoch": 1.72, + "learning_rate": 7.793159618525393e-05, + "loss": 1.4384, + "step": 554 + }, + { + "epoch": 1.73, + "learning_rate": 7.761019120074245e-05, + "loss": 1.4781, + "step": 555 + }, + { + "epoch": 1.73, + "learning_rate": 7.728902952845905e-05, + "loss": 1.4311, + "step": 556 + }, + { + "epoch": 1.73, + "learning_rate": 7.696811465849883e-05, + "loss": 1.4926, + "step": 557 + }, + { + "epoch": 1.74, + "learning_rate": 7.664745007827489e-05, + "loss": 1.4739, + "step": 558 + }, + { + "epoch": 1.74, + "learning_rate": 7.632703927248033e-05, + "loss": 1.509, + "step": 559 + }, + { + "epoch": 1.74, + "learning_rate": 7.60068857230506e-05, + "loss": 1.4555, + "step": 560 + }, + { + "epoch": 1.74, + "learning_rate": 7.568699290912533e-05, + "loss": 1.4588, + "step": 561 + }, + { + "epoch": 1.75, + "learning_rate": 7.536736430701088e-05, + "loss": 1.4574, + "step": 562 + }, + { + "epoch": 1.75, + "learning_rate": 7.504800339014232e-05, + "loss": 1.4805, + "step": 563 + }, + { + "epoch": 1.75, + "learning_rate": 7.472891362904577e-05, + "loss": 1.5081, + "step": 564 + }, + { + "epoch": 1.76, + "learning_rate": 7.441009849130067e-05, + "loss": 1.5081, + "step": 565 + }, + { + "epoch": 1.76, + "learning_rate": 7.409156144150213e-05, + "loss": 1.4548, + "step": 566 + }, + { + "epoch": 1.76, + "learning_rate": 7.377330594122317e-05, + "loss": 1.4478, + "step": 567 + }, + { + "epoch": 1.77, + "learning_rate": 7.34553354489773e-05, + "loss": 1.5048, + "step": 568 + }, + { + "epoch": 1.77, + "learning_rate": 7.31376534201807e-05, + "loss": 1.4889, + "step": 569 + }, + { + "epoch": 1.77, + "learning_rate": 7.282026330711489e-05, + "loss": 1.5045, + "step": 570 + }, + { + "epoch": 1.78, + "learning_rate": 7.250316855888906e-05, + "loss": 1.4352, + "step": 571 + }, + { + "epoch": 1.78, + "learning_rate": 7.218637262140268e-05, + "loss": 1.4881, + "step": 572 + }, + { + "epoch": 1.78, + "learning_rate": 7.186987893730797e-05, + "loss": 1.449, + "step": 573 + }, + { + "epoch": 1.79, + "learning_rate": 7.155369094597253e-05, + "loss": 1.4146, + "step": 574 + }, + { + "epoch": 1.79, + "learning_rate": 7.1237812083442e-05, + "loss": 1.4462, + "step": 575 + }, + { + "epoch": 1.79, + "learning_rate": 7.092224578240269e-05, + "loss": 1.4509, + "step": 576 + }, + { + "epoch": 1.79, + "learning_rate": 7.060699547214427e-05, + 
"loss": 1.4483, + "step": 577 + }, + { + "epoch": 1.8, + "learning_rate": 7.029206457852247e-05, + "loss": 1.4348, + "step": 578 + }, + { + "epoch": 1.8, + "learning_rate": 6.997745652392191e-05, + "loss": 1.4931, + "step": 579 + }, + { + "epoch": 1.8, + "learning_rate": 6.966317472721897e-05, + "loss": 1.4132, + "step": 580 + }, + { + "epoch": 1.81, + "learning_rate": 6.934922260374437e-05, + "loss": 1.3974, + "step": 581 + }, + { + "epoch": 1.81, + "learning_rate": 6.903560356524641e-05, + "loss": 1.4326, + "step": 582 + }, + { + "epoch": 1.81, + "learning_rate": 6.872232101985363e-05, + "loss": 1.4349, + "step": 583 + }, + { + "epoch": 1.82, + "learning_rate": 6.840937837203791e-05, + "loss": 1.4528, + "step": 584 + }, + { + "epoch": 1.82, + "learning_rate": 6.809677902257742e-05, + "loss": 1.4365, + "step": 585 + }, + { + "epoch": 1.82, + "learning_rate": 6.778452636851968e-05, + "loss": 1.4702, + "step": 586 + }, + { + "epoch": 1.83, + "learning_rate": 6.747262380314463e-05, + "loss": 1.458, + "step": 587 + }, + { + "epoch": 1.83, + "learning_rate": 6.71610747159277e-05, + "loss": 1.5413, + "step": 588 + }, + { + "epoch": 1.83, + "learning_rate": 6.684988249250314e-05, + "loss": 1.4205, + "step": 589 + }, + { + "epoch": 1.84, + "learning_rate": 6.653905051462708e-05, + "loss": 1.4643, + "step": 590 + }, + { + "epoch": 1.84, + "learning_rate": 6.622858216014084e-05, + "loss": 1.4071, + "step": 591 + }, + { + "epoch": 1.84, + "learning_rate": 6.591848080293418e-05, + "loss": 1.4669, + "step": 592 + }, + { + "epoch": 1.84, + "learning_rate": 6.56087498129087e-05, + "loss": 1.5062, + "step": 593 + }, + { + "epoch": 1.85, + "learning_rate": 6.52993925559412e-05, + "loss": 1.4334, + "step": 594 + }, + { + "epoch": 1.85, + "learning_rate": 6.499041239384698e-05, + "loss": 1.4696, + "step": 595 + }, + { + "epoch": 1.85, + "learning_rate": 6.468181268434354e-05, + "loss": 1.4575, + "step": 596 + }, + { + "epoch": 1.86, + "learning_rate": 6.437359678101389e-05, + "loss": 1.4432, + "step": 597 + }, + { + "epoch": 1.86, + "learning_rate": 6.406576803327022e-05, + "loss": 1.5047, + "step": 598 + }, + { + "epoch": 1.86, + "learning_rate": 6.375832978631743e-05, + "loss": 1.4297, + "step": 599 + }, + { + "epoch": 1.87, + "learning_rate": 6.345128538111685e-05, + "loss": 1.461, + "step": 600 + }, + { + "epoch": 1.87, + "eval_loss": 1.6174333095550537, + "eval_runtime": 233.649, + "eval_samples_per_second": 16.362, + "eval_steps_per_second": 4.092, + "step": 600 + }, + { + "epoch": 1.87, + "learning_rate": 6.314463815434988e-05, + "loss": 1.4978, + "step": 601 + }, + { + "epoch": 1.87, + "learning_rate": 6.283839143838169e-05, + "loss": 1.426, + "step": 602 + }, + { + "epoch": 1.88, + "learning_rate": 6.253254856122511e-05, + "loss": 1.4657, + "step": 603 + }, + { + "epoch": 1.88, + "learning_rate": 6.222711284650444e-05, + "loss": 1.5282, + "step": 604 + }, + { + "epoch": 1.88, + "learning_rate": 6.192208761341925e-05, + "loss": 1.4897, + "step": 605 + }, + { + "epoch": 1.88, + "learning_rate": 6.161747617670839e-05, + "loss": 1.4827, + "step": 606 + }, + { + "epoch": 1.89, + "learning_rate": 6.131328184661396e-05, + "loss": 1.4507, + "step": 607 + }, + { + "epoch": 1.89, + "learning_rate": 6.100950792884533e-05, + "loss": 1.4461, + "step": 608 + }, + { + "epoch": 1.89, + "learning_rate": 6.070615772454312e-05, + "loss": 1.4187, + "step": 609 + }, + { + "epoch": 1.9, + "learning_rate": 6.040323453024351e-05, + "loss": 1.4704, + "step": 610 + }, + { + "epoch": 1.9, + "learning_rate": 
6.0100741637842316e-05, + "loss": 1.4869, + "step": 611 + }, + { + "epoch": 1.9, + "learning_rate": 5.979868233455917e-05, + "loss": 1.4657, + "step": 612 + }, + { + "epoch": 1.91, + "learning_rate": 5.949705990290186e-05, + "loss": 1.4234, + "step": 613 + }, + { + "epoch": 1.91, + "learning_rate": 5.919587762063072e-05, + "loss": 1.4519, + "step": 614 + }, + { + "epoch": 1.91, + "learning_rate": 5.889513876072283e-05, + "loss": 1.4588, + "step": 615 + }, + { + "epoch": 1.92, + "learning_rate": 5.859484659133663e-05, + "loss": 1.4867, + "step": 616 + }, + { + "epoch": 1.92, + "learning_rate": 5.829500437577626e-05, + "loss": 1.5157, + "step": 617 + }, + { + "epoch": 1.92, + "learning_rate": 5.799561537245628e-05, + "loss": 1.4492, + "step": 618 + }, + { + "epoch": 1.93, + "learning_rate": 5.769668283486607e-05, + "loss": 1.514, + "step": 619 + }, + { + "epoch": 1.93, + "learning_rate": 5.739821001153451e-05, + "loss": 1.5127, + "step": 620 + }, + { + "epoch": 1.93, + "learning_rate": 5.710020014599486e-05, + "loss": 1.4204, + "step": 621 + }, + { + "epoch": 1.93, + "learning_rate": 5.680265647674925e-05, + "loss": 1.4346, + "step": 622 + }, + { + "epoch": 1.94, + "learning_rate": 5.650558223723365e-05, + "loss": 1.4342, + "step": 623 + }, + { + "epoch": 1.94, + "learning_rate": 5.620898065578268e-05, + "loss": 1.4699, + "step": 624 + }, + { + "epoch": 1.94, + "learning_rate": 5.591285495559453e-05, + "loss": 1.5088, + "step": 625 + }, + { + "epoch": 1.95, + "learning_rate": 5.561720835469602e-05, + "loss": 1.5015, + "step": 626 + }, + { + "epoch": 1.95, + "learning_rate": 5.5322044065907475e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 1.95, + "learning_rate": 5.502736529680785e-05, + "loss": 1.4553, + "step": 628 + }, + { + "epoch": 1.96, + "learning_rate": 5.47331752497001e-05, + "loss": 1.4419, + "step": 629 + }, + { + "epoch": 1.96, + "learning_rate": 5.443947712157587e-05, + "loss": 1.4172, + "step": 630 + }, + { + "epoch": 1.96, + "learning_rate": 5.41462741040814e-05, + "loss": 1.4888, + "step": 631 + }, + { + "epoch": 1.97, + "learning_rate": 5.385356938348234e-05, + "loss": 1.412, + "step": 632 + }, + { + "epoch": 1.97, + "learning_rate": 5.3561366140629274e-05, + "loss": 1.4327, + "step": 633 + }, + { + "epoch": 1.97, + "learning_rate": 5.326966755092334e-05, + "loss": 1.502, + "step": 634 + }, + { + "epoch": 1.98, + "learning_rate": 5.297847678428141e-05, + "loss": 1.4499, + "step": 635 + }, + { + "epoch": 1.98, + "learning_rate": 5.2687797005101834e-05, + "loss": 1.4783, + "step": 636 + }, + { + "epoch": 1.98, + "learning_rate": 5.239763137223004e-05, + "loss": 1.4378, + "step": 637 + }, + { + "epoch": 1.98, + "learning_rate": 5.21079830389241e-05, + "loss": 1.5055, + "step": 638 + }, + { + "epoch": 1.99, + "learning_rate": 5.18188551528207e-05, + "loss": 1.4963, + "step": 639 + }, + { + "epoch": 1.99, + "learning_rate": 5.1530250855900576e-05, + "loss": 1.4799, + "step": 640 + }, + { + "epoch": 1.99, + "learning_rate": 5.124217328445475e-05, + "loss": 1.4388, + "step": 641 + }, + { + "epoch": 2.0, + "learning_rate": 5.095462556905021e-05, + "loss": 1.484, + "step": 642 + }, + { + "epoch": 2.0, + "learning_rate": 5.0667610834495785e-05, + "loss": 1.4811, + "step": 643 + }, + { + "epoch": 2.0, + "learning_rate": 5.03811321998086e-05, + "loss": 1.2941, + "step": 644 + }, + { + "epoch": 2.01, + "learning_rate": 5.009519277817976e-05, + "loss": 1.3975, + "step": 645 + }, + { + "epoch": 2.01, + "learning_rate": 4.9809795676940815e-05, + "loss": 1.3432, + "step": 646 + }, 
+ { + "epoch": 2.01, + "learning_rate": 4.952494399752976e-05, + "loss": 1.3014, + "step": 647 + }, + { + "epoch": 2.02, + "learning_rate": 4.924064083545744e-05, + "loss": 1.3491, + "step": 648 + }, + { + "epoch": 2.02, + "learning_rate": 4.8956889280274056e-05, + "loss": 1.3238, + "step": 649 + }, + { + "epoch": 2.02, + "learning_rate": 4.8673692415535186e-05, + "loss": 1.327, + "step": 650 + }, + { + "epoch": 2.02, + "eval_loss": 1.6340641975402832, + "eval_runtime": 233.6965, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 650 + }, + { + "epoch": 2.02, + "learning_rate": 4.83910533187688e-05, + "loss": 1.3208, + "step": 651 + }, + { + "epoch": 2.03, + "learning_rate": 4.810897506144137e-05, + "loss": 1.2936, + "step": 652 + }, + { + "epoch": 2.03, + "learning_rate": 4.782746070892472e-05, + "loss": 1.323, + "step": 653 + }, + { + "epoch": 2.03, + "learning_rate": 4.754651332046274e-05, + "loss": 1.3304, + "step": 654 + }, + { + "epoch": 2.04, + "learning_rate": 4.726613594913796e-05, + "loss": 1.2426, + "step": 655 + }, + { + "epoch": 2.04, + "learning_rate": 4.698633164183853e-05, + "loss": 1.2882, + "step": 656 + }, + { + "epoch": 2.04, + "learning_rate": 4.670710343922504e-05, + "loss": 1.3273, + "step": 657 + }, + { + "epoch": 2.05, + "learning_rate": 4.6428454375697485e-05, + "loss": 1.3391, + "step": 658 + }, + { + "epoch": 2.05, + "learning_rate": 4.615038747936237e-05, + "loss": 1.3143, + "step": 659 + }, + { + "epoch": 2.05, + "learning_rate": 4.587290577199965e-05, + "loss": 1.2846, + "step": 660 + }, + { + "epoch": 2.06, + "learning_rate": 4.559601226902998e-05, + "loss": 1.2887, + "step": 661 + }, + { + "epoch": 2.06, + "learning_rate": 4.531970997948203e-05, + "loss": 1.3239, + "step": 662 + }, + { + "epoch": 2.06, + "learning_rate": 4.504400190595958e-05, + "loss": 1.3552, + "step": 663 + }, + { + "epoch": 2.07, + "learning_rate": 4.476889104460907e-05, + "loss": 1.3554, + "step": 664 + }, + { + "epoch": 2.07, + "learning_rate": 4.4494380385086986e-05, + "loss": 1.3333, + "step": 665 + }, + { + "epoch": 2.07, + "learning_rate": 4.422047291052728e-05, + "loss": 1.3107, + "step": 666 + }, + { + "epoch": 2.07, + "learning_rate": 4.3947171597509176e-05, + "loss": 1.3228, + "step": 667 + }, + { + "epoch": 2.08, + "learning_rate": 4.367447941602453e-05, + "loss": 1.3224, + "step": 668 + }, + { + "epoch": 2.08, + "learning_rate": 4.3402399329445855e-05, + "loss": 1.2844, + "step": 669 + }, + { + "epoch": 2.08, + "learning_rate": 4.3130934294493885e-05, + "loss": 1.3352, + "step": 670 + }, + { + "epoch": 2.09, + "learning_rate": 4.286008726120543e-05, + "loss": 1.3217, + "step": 671 + }, + { + "epoch": 2.09, + "learning_rate": 4.2589861172901634e-05, + "loss": 1.2976, + "step": 672 + }, + { + "epoch": 2.09, + "learning_rate": 4.232025896615559e-05, + "loss": 1.3108, + "step": 673 + }, + { + "epoch": 2.1, + "learning_rate": 4.2051283570760746e-05, + "loss": 1.2893, + "step": 674 + }, + { + "epoch": 2.1, + "learning_rate": 4.178293790969883e-05, + "loss": 1.3452, + "step": 675 + }, + { + "epoch": 2.1, + "learning_rate": 4.1515224899108164e-05, + "loss": 1.332, + "step": 676 + }, + { + "epoch": 2.11, + "learning_rate": 4.1248147448252185e-05, + "loss": 1.2998, + "step": 677 + }, + { + "epoch": 2.11, + "learning_rate": 4.098170845948736e-05, + "loss": 1.2952, + "step": 678 + }, + { + "epoch": 2.11, + "learning_rate": 4.071591082823215e-05, + "loss": 1.3512, + "step": 679 + }, + { + "epoch": 2.12, + "learning_rate": 4.045075744293525e-05, + "loss": 
1.3571, + "step": 680 + }, + { + "epoch": 2.12, + "learning_rate": 4.01862511850442e-05, + "loss": 1.3415, + "step": 681 + }, + { + "epoch": 2.12, + "learning_rate": 3.992239492897429e-05, + "loss": 1.3264, + "step": 682 + }, + { + "epoch": 2.12, + "learning_rate": 3.965919154207708e-05, + "loss": 1.3013, + "step": 683 + }, + { + "epoch": 2.13, + "learning_rate": 3.939664388460932e-05, + "loss": 1.369, + "step": 684 + }, + { + "epoch": 2.13, + "learning_rate": 3.913475480970193e-05, + "loss": 1.2464, + "step": 685 + }, + { + "epoch": 2.13, + "learning_rate": 3.887352716332892e-05, + "loss": 1.3162, + "step": 686 + }, + { + "epoch": 2.14, + "learning_rate": 3.861296378427656e-05, + "loss": 1.3221, + "step": 687 + }, + { + "epoch": 2.14, + "learning_rate": 3.835306750411237e-05, + "loss": 1.3219, + "step": 688 + }, + { + "epoch": 2.14, + "learning_rate": 3.8093841147154475e-05, + "loss": 1.3446, + "step": 689 + }, + { + "epoch": 2.15, + "learning_rate": 3.783528753044093e-05, + "loss": 1.3667, + "step": 690 + }, + { + "epoch": 2.15, + "learning_rate": 3.757740946369901e-05, + "loss": 1.3098, + "step": 691 + }, + { + "epoch": 2.15, + "learning_rate": 3.732020974931471e-05, + "loss": 1.3017, + "step": 692 + }, + { + "epoch": 2.16, + "learning_rate": 3.7063691182302304e-05, + "loss": 1.3354, + "step": 693 + }, + { + "epoch": 2.16, + "learning_rate": 3.680785655027399e-05, + "loss": 1.3081, + "step": 694 + }, + { + "epoch": 2.16, + "learning_rate": 3.6552708633409613e-05, + "loss": 1.2563, + "step": 695 + }, + { + "epoch": 2.16, + "learning_rate": 3.6298250204426334e-05, + "loss": 1.307, + "step": 696 + }, + { + "epoch": 2.17, + "learning_rate": 3.6044484028548676e-05, + "loss": 1.2907, + "step": 697 + }, + { + "epoch": 2.17, + "learning_rate": 3.5791412863478326e-05, + "loss": 1.3023, + "step": 698 + }, + { + "epoch": 2.17, + "learning_rate": 3.553903945936421e-05, + "loss": 1.3144, + "step": 699 + }, + { + "epoch": 2.18, + "learning_rate": 3.528736655877264e-05, + "loss": 1.3015, + "step": 700 + }, + { + "epoch": 2.18, + "eval_loss": 1.6665308475494385, + "eval_runtime": 233.6943, + "eval_samples_per_second": 16.359, + "eval_steps_per_second": 4.091, + "step": 700 + }, + { + "epoch": 2.18, + "learning_rate": 3.5036396896657455e-05, + "loss": 1.2943, + "step": 701 + }, + { + "epoch": 2.18, + "learning_rate": 3.478613320033042e-05, + "loss": 1.3333, + "step": 702 + }, + { + "epoch": 2.19, + "learning_rate": 3.453657818943142e-05, + "loss": 1.2983, + "step": 703 + }, + { + "epoch": 2.19, + "learning_rate": 3.4287734575898975e-05, + "loss": 1.3392, + "step": 704 + }, + { + "epoch": 2.19, + "learning_rate": 3.403960506394092e-05, + "loss": 1.2677, + "step": 705 + }, + { + "epoch": 2.2, + "learning_rate": 3.379219235000463e-05, + "loss": 1.3197, + "step": 706 + }, + { + "epoch": 2.2, + "learning_rate": 3.3545499122748216e-05, + "loss": 1.3343, + "step": 707 + }, + { + "epoch": 2.2, + "learning_rate": 3.329952806301092e-05, + "loss": 1.3591, + "step": 708 + }, + { + "epoch": 2.21, + "learning_rate": 3.305428184378413e-05, + "loss": 1.3272, + "step": 709 + }, + { + "epoch": 2.21, + "learning_rate": 3.280976313018239e-05, + "loss": 1.3499, + "step": 710 + }, + { + "epoch": 2.21, + "learning_rate": 3.256597457941429e-05, + "loss": 1.3371, + "step": 711 + }, + { + "epoch": 2.21, + "learning_rate": 3.232291884075373e-05, + "loss": 1.312, + "step": 712 + }, + { + "epoch": 2.22, + "learning_rate": 3.208059855551101e-05, + "loss": 1.3502, + "step": 713 + }, + { + "epoch": 2.22, + "learning_rate": 
3.18390163570042e-05, + "loss": 1.3094, + "step": 714 + }, + { + "epoch": 2.22, + "learning_rate": 3.1598174870530604e-05, + "loss": 1.3181, + "step": 715 + }, + { + "epoch": 2.23, + "learning_rate": 3.1358076713338014e-05, + "loss": 1.3011, + "step": 716 + }, + { + "epoch": 2.23, + "learning_rate": 3.1118724494596405e-05, + "loss": 1.3054, + "step": 717 + }, + { + "epoch": 2.23, + "learning_rate": 3.0880120815369694e-05, + "loss": 1.3215, + "step": 718 + }, + { + "epoch": 2.24, + "learning_rate": 3.0642268268587136e-05, + "loss": 1.2908, + "step": 719 + }, + { + "epoch": 2.24, + "learning_rate": 3.0405169439015557e-05, + "loss": 1.3334, + "step": 720 + }, + { + "epoch": 2.24, + "learning_rate": 3.0168826903230906e-05, + "loss": 1.3275, + "step": 721 + }, + { + "epoch": 2.25, + "learning_rate": 2.9933243229590568e-05, + "loss": 1.3329, + "step": 722 + }, + { + "epoch": 2.25, + "learning_rate": 2.969842097820519e-05, + "loss": 1.3185, + "step": 723 + }, + { + "epoch": 2.25, + "learning_rate": 2.9464362700910943e-05, + "loss": 1.3443, + "step": 724 + }, + { + "epoch": 2.26, + "learning_rate": 2.9231070941241988e-05, + "loss": 1.3034, + "step": 725 + }, + { + "epoch": 2.26, + "learning_rate": 2.899854823440241e-05, + "loss": 1.304, + "step": 726 + }, + { + "epoch": 2.26, + "learning_rate": 2.8766797107239164e-05, + "loss": 1.3136, + "step": 727 + }, + { + "epoch": 2.26, + "learning_rate": 2.8535820078214236e-05, + "loss": 1.2894, + "step": 728 + }, + { + "epoch": 2.27, + "learning_rate": 2.8305619657377413e-05, + "loss": 1.3303, + "step": 729 + }, + { + "epoch": 2.27, + "learning_rate": 2.8076198346339113e-05, + "loss": 1.3158, + "step": 730 + }, + { + "epoch": 2.27, + "learning_rate": 2.7847558638242964e-05, + "loss": 1.3071, + "step": 731 + }, + { + "epoch": 2.28, + "learning_rate": 2.7619703017738917e-05, + "loss": 1.2951, + "step": 732 + }, + { + "epoch": 2.28, + "learning_rate": 2.7392633960956127e-05, + "loss": 1.3138, + "step": 733 + }, + { + "epoch": 2.28, + "learning_rate": 2.7166353935476085e-05, + "loss": 1.3523, + "step": 734 + }, + { + "epoch": 2.29, + "learning_rate": 2.694086540030587e-05, + "loss": 1.2937, + "step": 735 + }, + { + "epoch": 2.29, + "learning_rate": 2.671617080585127e-05, + "loss": 1.3493, + "step": 736 + }, + { + "epoch": 2.29, + "learning_rate": 2.6492272593890267e-05, + "loss": 1.309, + "step": 737 + }, + { + "epoch": 2.3, + "learning_rate": 2.6269173197546527e-05, + "loss": 1.3188, + "step": 738 + }, + { + "epoch": 2.3, + "learning_rate": 2.6046875041262852e-05, + "loss": 1.3202, + "step": 739 + }, + { + "epoch": 2.3, + "learning_rate": 2.5825380540774914e-05, + "loss": 1.359, + "step": 740 + }, + { + "epoch": 2.3, + "learning_rate": 2.560469210308497e-05, + "loss": 1.2837, + "step": 741 + }, + { + "epoch": 2.31, + "learning_rate": 2.5384812126435697e-05, + "loss": 1.3195, + "step": 742 + }, + { + "epoch": 2.31, + "learning_rate": 2.5165743000284213e-05, + "loss": 1.2797, + "step": 743 + }, + { + "epoch": 2.31, + "learning_rate": 2.4947487105275945e-05, + "loss": 1.3656, + "step": 744 + }, + { + "epoch": 2.32, + "learning_rate": 2.4730046813218987e-05, + "loss": 1.3094, + "step": 745 + }, + { + "epoch": 2.32, + "learning_rate": 2.451342448705811e-05, + "loss": 1.3176, + "step": 746 + }, + { + "epoch": 2.32, + "learning_rate": 2.4297622480849104e-05, + "loss": 1.3318, + "step": 747 + }, + { + "epoch": 2.33, + "learning_rate": 2.408264313973343e-05, + "loss": 1.3367, + "step": 748 + }, + { + "epoch": 2.33, + "learning_rate": 2.3868488799912414e-05, + "loss": 
1.2717, + "step": 749 + }, + { + "epoch": 2.33, + "learning_rate": 2.3655161788622138e-05, + "loss": 1.3328, + "step": 750 + }, + { + "epoch": 2.33, + "eval_loss": 1.6713805198669434, + "eval_runtime": 233.7116, + "eval_samples_per_second": 16.358, + "eval_steps_per_second": 4.091, + "step": 750 + }, + { + "epoch": 2.34, + "learning_rate": 2.344266442410794e-05, + "loss": 1.3325, + "step": 751 + }, + { + "epoch": 2.34, + "learning_rate": 2.323099901559931e-05, + "loss": 1.3277, + "step": 752 + }, + { + "epoch": 2.34, + "learning_rate": 2.302016786328488e-05, + "loss": 1.3567, + "step": 753 + }, + { + "epoch": 2.35, + "learning_rate": 2.281017325828716e-05, + "loss": 1.3087, + "step": 754 + }, + { + "epoch": 2.35, + "learning_rate": 2.260101748263803e-05, + "loss": 1.3173, + "step": 755 + }, + { + "epoch": 2.35, + "learning_rate": 2.2392702809253596e-05, + "loss": 1.3234, + "step": 756 + }, + { + "epoch": 2.35, + "learning_rate": 2.218523150190962e-05, + "loss": 1.3649, + "step": 757 + }, + { + "epoch": 2.36, + "learning_rate": 2.1978605815217025e-05, + "loss": 1.3433, + "step": 758 + }, + { + "epoch": 2.36, + "learning_rate": 2.177282799459719e-05, + "loss": 1.2992, + "step": 759 + }, + { + "epoch": 2.36, + "learning_rate": 2.1567900276257703e-05, + "loss": 1.3004, + "step": 760 + }, + { + "epoch": 2.37, + "learning_rate": 2.1363824887167993e-05, + "loss": 1.2894, + "step": 761 + }, + { + "epoch": 2.37, + "learning_rate": 2.1160604045035115e-05, + "loss": 1.3151, + "step": 762 + }, + { + "epoch": 2.37, + "learning_rate": 2.0958239958279756e-05, + "loss": 1.2694, + "step": 763 + }, + { + "epoch": 2.38, + "learning_rate": 2.0756734826012104e-05, + "loss": 1.2979, + "step": 764 + }, + { + "epoch": 2.38, + "learning_rate": 2.0556090838007957e-05, + "loss": 1.3187, + "step": 765 + }, + { + "epoch": 2.38, + "learning_rate": 2.0356310174685124e-05, + "loss": 1.3255, + "step": 766 + }, + { + "epoch": 2.39, + "learning_rate": 2.0157395007079428e-05, + "loss": 1.3623, + "step": 767 + }, + { + "epoch": 2.39, + "learning_rate": 1.9959347496821333e-05, + "loss": 1.317, + "step": 768 + }, + { + "epoch": 2.39, + "learning_rate": 1.9762169796112397e-05, + "loss": 1.3102, + "step": 769 + }, + { + "epoch": 2.4, + "learning_rate": 1.956586404770182e-05, + "loss": 1.244, + "step": 770 + }, + { + "epoch": 2.4, + "learning_rate": 1.937043238486329e-05, + "loss": 1.3051, + "step": 771 + }, + { + "epoch": 2.4, + "learning_rate": 1.9175876931371626e-05, + "loss": 1.2869, + "step": 772 + }, + { + "epoch": 2.4, + "learning_rate": 1.898219980147993e-05, + "loss": 1.3365, + "step": 773 + }, + { + "epoch": 2.41, + "learning_rate": 1.878940309989633e-05, + "loss": 1.3091, + "step": 774 + }, + { + "epoch": 2.41, + "learning_rate": 1.859748892176133e-05, + "loss": 1.3401, + "step": 775 + }, + { + "epoch": 2.41, + "learning_rate": 1.840645935262497e-05, + "loss": 1.3562, + "step": 776 + }, + { + "epoch": 2.42, + "learning_rate": 1.8216316468424098e-05, + "loss": 1.3201, + "step": 777 + }, + { + "epoch": 2.42, + "learning_rate": 1.8027062335459977e-05, + "loss": 1.2757, + "step": 778 + }, + { + "epoch": 2.42, + "learning_rate": 1.7838699010375625e-05, + "loss": 1.3541, + "step": 779 + }, + { + "epoch": 2.43, + "learning_rate": 1.7651228540133623e-05, + "loss": 1.3491, + "step": 780 + }, + { + "epoch": 2.43, + "learning_rate": 1.7464652961993768e-05, + "loss": 1.2903, + "step": 781 + }, + { + "epoch": 2.43, + "learning_rate": 1.727897430349097e-05, + "loss": 1.3879, + "step": 782 + }, + { + "epoch": 2.44, + "learning_rate": 
1.7094194582413326e-05, + "loss": 1.3311, + "step": 783 + }, + { + "epoch": 2.44, + "learning_rate": 1.6910315806779987e-05, + "loss": 1.34, + "step": 784 + }, + { + "epoch": 2.44, + "learning_rate": 1.6727339974819456e-05, + "loss": 1.3331, + "step": 785 + }, + { + "epoch": 2.44, + "learning_rate": 1.6545269074947922e-05, + "loss": 1.3164, + "step": 786 + }, + { + "epoch": 2.45, + "learning_rate": 1.636410508574753e-05, + "loss": 1.3505, + "step": 787 + }, + { + "epoch": 2.45, + "learning_rate": 1.618384997594494e-05, + "loss": 1.2556, + "step": 788 + }, + { + "epoch": 2.45, + "learning_rate": 1.6004505704389983e-05, + "loss": 1.3023, + "step": 789 + }, + { + "epoch": 2.46, + "learning_rate": 1.5826074220034226e-05, + "loss": 1.3524, + "step": 790 + }, + { + "epoch": 2.46, + "learning_rate": 1.5648557461910018e-05, + "loss": 1.3215, + "step": 791 + }, + { + "epoch": 2.46, + "learning_rate": 1.547195735910919e-05, + "loss": 1.3593, + "step": 792 + }, + { + "epoch": 2.47, + "learning_rate": 1.5296275830762206e-05, + "loss": 1.3482, + "step": 793 + }, + { + "epoch": 2.47, + "learning_rate": 1.5121514786017365e-05, + "loss": 1.3521, + "step": 794 + }, + { + "epoch": 2.47, + "learning_rate": 1.4947676124019839e-05, + "loss": 1.3138, + "step": 795 + }, + { + "epoch": 2.48, + "learning_rate": 1.4774761733891319e-05, + "loss": 1.3701, + "step": 796 + }, + { + "epoch": 2.48, + "learning_rate": 1.4602773494709254e-05, + "loss": 1.3408, + "step": 797 + }, + { + "epoch": 2.48, + "learning_rate": 1.4431713275486602e-05, + "loss": 1.343, + "step": 798 + }, + { + "epoch": 2.49, + "learning_rate": 1.4261582935151352e-05, + "loss": 1.2744, + "step": 799 + }, + { + "epoch": 2.49, + "learning_rate": 1.4092384322526442e-05, + "loss": 1.3453, + "step": 800 + }, + { + "epoch": 2.49, + "eval_loss": 1.6718111038208008, + "eval_runtime": 233.7605, + "eval_samples_per_second": 16.354, + "eval_steps_per_second": 4.09, + "step": 800 + }, + { + "epoch": 2.49, + "learning_rate": 1.3924119276309677e-05, + "loss": 1.2647, + "step": 801 + }, + { + "epoch": 2.49, + "learning_rate": 1.3756789625053601e-05, + "loss": 1.321, + "step": 802 + }, + { + "epoch": 2.5, + "learning_rate": 1.3590397187145853e-05, + "loss": 1.3403, + "step": 803 + }, + { + "epoch": 2.5, + "learning_rate": 1.3424943770789211e-05, + "loss": 1.3191, + "step": 804 + }, + { + "epoch": 2.5, + "learning_rate": 1.3260431173982001e-05, + "loss": 1.2983, + "step": 805 + }, + { + "epoch": 2.51, + "learning_rate": 1.3096861184498643e-05, + "loss": 1.2955, + "step": 806 + }, + { + "epoch": 2.51, + "learning_rate": 1.293423557987009e-05, + "loss": 1.3297, + "step": 807 + }, + { + "epoch": 2.51, + "learning_rate": 1.2772556127364588e-05, + "loss": 1.3273, + "step": 808 + }, + { + "epoch": 2.52, + "learning_rate": 1.2611824583968457e-05, + "loss": 1.2867, + "step": 809 + }, + { + "epoch": 2.52, + "learning_rate": 1.2452042696366984e-05, + "loss": 1.3132, + "step": 810 + }, + { + "epoch": 2.52, + "learning_rate": 1.229321220092552e-05, + "loss": 1.323, + "step": 811 + }, + { + "epoch": 2.53, + "learning_rate": 1.2135334823670452e-05, + "loss": 1.3332, + "step": 812 + }, + { + "epoch": 2.53, + "learning_rate": 1.1978412280270568e-05, + "loss": 1.2775, + "step": 813 + }, + { + "epoch": 2.53, + "learning_rate": 1.182244627601845e-05, + "loss": 1.3049, + "step": 814 + }, + { + "epoch": 2.53, + "learning_rate": 1.1667438505811801e-05, + "loss": 1.3206, + "step": 815 + }, + { + "epoch": 2.54, + "learning_rate": 1.1513390654135103e-05, + "loss": 1.386, + "step": 816 + }, + 
{ + "epoch": 2.54, + "learning_rate": 1.1360304395041343e-05, + "loss": 1.3292, + "step": 817 + }, + { + "epoch": 2.54, + "learning_rate": 1.1208181392133766e-05, + "loss": 1.3249, + "step": 818 + }, + { + "epoch": 2.55, + "learning_rate": 1.1057023298547864e-05, + "loss": 1.2934, + "step": 819 + }, + { + "epoch": 2.55, + "learning_rate": 1.0906831756933267e-05, + "loss": 1.3471, + "step": 820 + }, + { + "epoch": 2.55, + "learning_rate": 1.0757608399436125e-05, + "loss": 1.3505, + "step": 821 + }, + { + "epoch": 2.56, + "learning_rate": 1.0609354847681152e-05, + "loss": 1.283, + "step": 822 + }, + { + "epoch": 2.56, + "learning_rate": 1.0462072712754035e-05, + "loss": 1.2679, + "step": 823 + }, + { + "epoch": 2.56, + "learning_rate": 1.0315763595184113e-05, + "loss": 1.3317, + "step": 824 + }, + { + "epoch": 2.57, + "learning_rate": 1.0170429084926746e-05, + "loss": 1.308, + "step": 825 + }, + { + "epoch": 2.57, + "learning_rate": 1.0026070761346229e-05, + "loss": 1.2816, + "step": 826 + }, + { + "epoch": 2.57, + "learning_rate": 9.882690193198463e-06, + "loss": 1.2712, + "step": 827 + }, + { + "epoch": 2.58, + "learning_rate": 9.740288938613995e-06, + "loss": 1.3133, + "step": 828 + }, + { + "epoch": 2.58, + "learning_rate": 9.598868545081153e-06, + "loss": 1.257, + "step": 829 + }, + { + "epoch": 2.58, + "learning_rate": 9.458430549429032e-06, + "loss": 1.3271, + "step": 830 + }, + { + "epoch": 2.58, + "learning_rate": 9.318976477811026e-06, + "loss": 1.3329, + "step": 831 + }, + { + "epoch": 2.59, + "learning_rate": 9.18050784568808e-06, + "loss": 1.2939, + "step": 832 + }, + { + "epoch": 2.59, + "learning_rate": 9.043026157812229e-06, + "loss": 1.3111, + "step": 833 + }, + { + "epoch": 2.59, + "learning_rate": 8.906532908210396e-06, + "loss": 1.3164, + "step": 834 + }, + { + "epoch": 2.6, + "learning_rate": 8.771029580167967e-06, + "loss": 1.3162, + "step": 835 + }, + { + "epoch": 2.6, + "learning_rate": 8.636517646212761e-06, + "loss": 1.303, + "step": 836 + }, + { + "epoch": 2.6, + "learning_rate": 8.502998568099063e-06, + "loss": 1.3545, + "step": 837 + }, + { + "epoch": 2.61, + "learning_rate": 8.370473796791622e-06, + "loss": 1.3224, + "step": 838 + }, + { + "epoch": 2.61, + "learning_rate": 8.238944772450064e-06, + "loss": 1.3146, + "step": 839 + }, + { + "epoch": 2.61, + "learning_rate": 8.108412924413056e-06, + "loss": 1.3171, + "step": 840 + }, + { + "epoch": 2.62, + "learning_rate": 7.978879671182848e-06, + "loss": 1.3209, + "step": 841 + }, + { + "epoch": 2.62, + "learning_rate": 7.850346420409949e-06, + "loss": 1.3143, + "step": 842 + }, + { + "epoch": 2.62, + "learning_rate": 7.722814568877646e-06, + "loss": 1.3112, + "step": 843 + }, + { + "epoch": 2.63, + "learning_rate": 7.596285502486966e-06, + "loss": 1.3056, + "step": 844 + }, + { + "epoch": 2.63, + "learning_rate": 7.4707605962415775e-06, + "loss": 1.3151, + "step": 845 + }, + { + "epoch": 2.63, + "learning_rate": 7.346241214232819e-06, + "loss": 1.3774, + "step": 846 + }, + { + "epoch": 2.63, + "learning_rate": 7.222728709624949e-06, + "loss": 1.3432, + "step": 847 + }, + { + "epoch": 2.64, + "learning_rate": 7.100224424640312e-06, + "loss": 1.3036, + "step": 848 + }, + { + "epoch": 2.64, + "learning_rate": 6.978729690544927e-06, + "loss": 1.2911, + "step": 849 + }, + { + "epoch": 2.64, + "learning_rate": 6.858245827633869e-06, + "loss": 1.3458, + "step": 850 + }, + { + "epoch": 2.64, + "eval_loss": 1.6725014448165894, + "eval_runtime": 233.7534, + "eval_samples_per_second": 16.355, + "eval_steps_per_second": 4.09, 
+ "step": 850 + }, + { + "epoch": 2.65, + "learning_rate": 6.7387741452169415e-06, + "loss": 1.2943, + "step": 851 + }, + { + "epoch": 2.65, + "learning_rate": 6.6203159416045605e-06, + "loss": 1.3108, + "step": 852 + }, + { + "epoch": 2.65, + "learning_rate": 6.502872504093527e-06, + "loss": 1.2836, + "step": 853 + }, + { + "epoch": 2.66, + "learning_rate": 6.3864451089530985e-06, + "loss": 1.3342, + "step": 854 + }, + { + "epoch": 2.66, + "learning_rate": 6.271035021411098e-06, + "loss": 1.304, + "step": 855 + }, + { + "epoch": 2.66, + "learning_rate": 6.156643495640157e-06, + "loss": 1.3163, + "step": 856 + }, + { + "epoch": 2.67, + "learning_rate": 6.043271774744086e-06, + "loss": 1.3385, + "step": 857 + }, + { + "epoch": 2.67, + "learning_rate": 5.930921090744402e-06, + "loss": 1.2856, + "step": 858 + }, + { + "epoch": 2.67, + "learning_rate": 5.81959266456692e-06, + "loss": 1.3414, + "step": 859 + }, + { + "epoch": 2.67, + "learning_rate": 5.709287706028454e-06, + "loss": 1.3353, + "step": 860 + }, + { + "epoch": 2.68, + "learning_rate": 5.600007413823693e-06, + "loss": 1.3286, + "step": 861 + }, + { + "epoch": 2.68, + "learning_rate": 5.491752975512232e-06, + "loss": 1.3089, + "step": 862 + }, + { + "epoch": 2.68, + "learning_rate": 5.38452556750555e-06, + "loss": 1.3385, + "step": 863 + }, + { + "epoch": 2.69, + "learning_rate": 5.278326355054308e-06, + "loss": 1.34, + "step": 864 + }, + { + "epoch": 2.69, + "learning_rate": 5.173156492235665e-06, + "loss": 1.328, + "step": 865 + }, + { + "epoch": 2.69, + "learning_rate": 5.069017121940733e-06, + "loss": 1.2771, + "step": 866 + }, + { + "epoch": 2.7, + "learning_rate": 4.96590937586221e-06, + "loss": 1.2927, + "step": 867 + }, + { + "epoch": 2.7, + "learning_rate": 4.863834374481946e-06, + "loss": 1.2976, + "step": 868 + }, + { + "epoch": 2.7, + "learning_rate": 4.762793227058915e-06, + "loss": 1.3291, + "step": 869 + }, + { + "epoch": 2.71, + "learning_rate": 4.662787031617122e-06, + "loss": 1.3162, + "step": 870 + }, + { + "epoch": 2.71, + "learning_rate": 4.563816874933547e-06, + "loss": 1.287, + "step": 871 + }, + { + "epoch": 2.71, + "learning_rate": 4.465883832526552e-06, + "loss": 1.3419, + "step": 872 + }, + { + "epoch": 2.72, + "learning_rate": 4.368988968644006e-06, + "loss": 1.3645, + "step": 873 + }, + { + "epoch": 2.72, + "learning_rate": 4.2731333362518e-06, + "loss": 1.3361, + "step": 874 + }, + { + "epoch": 2.72, + "learning_rate": 4.1783179770224275e-06, + "loss": 1.3006, + "step": 875 + }, + { + "epoch": 2.72, + "learning_rate": 4.084543921323591e-06, + "loss": 1.2943, + "step": 876 + }, + { + "epoch": 2.73, + "learning_rate": 3.991812188207112e-06, + "loss": 1.3161, + "step": 877 + }, + { + "epoch": 2.73, + "learning_rate": 3.90012378539768e-06, + "loss": 1.3414, + "step": 878 + }, + { + "epoch": 2.73, + "learning_rate": 3.8094797092821264e-06, + "loss": 1.3094, + "step": 879 + }, + { + "epoch": 2.74, + "learning_rate": 3.7198809448984128e-06, + "loss": 1.2949, + "step": 880 + }, + { + "epoch": 2.74, + "learning_rate": 3.6313284659250215e-06, + "loss": 1.3336, + "step": 881 + }, + { + "epoch": 2.74, + "learning_rate": 3.5438232346703627e-06, + "loss": 1.3238, + "step": 882 + }, + { + "epoch": 2.75, + "learning_rate": 3.457366202062284e-06, + "loss": 1.3209, + "step": 883 + }, + { + "epoch": 2.75, + "learning_rate": 3.371958307637746e-06, + "loss": 1.352, + "step": 884 + }, + { + "epoch": 2.75, + "learning_rate": 3.287600479532649e-06, + "loss": 1.3234, + "step": 885 + }, + { + "epoch": 2.76, + "learning_rate": 
3.204293634471689e-06, + "loss": 1.2995, + "step": 886 + }, + { + "epoch": 2.76, + "learning_rate": 3.1220386777584764e-06, + "loss": 1.3228, + "step": 887 + }, + { + "epoch": 2.76, + "learning_rate": 3.0408365032656093e-06, + "loss": 1.3059, + "step": 888 + }, + { + "epoch": 2.77, + "learning_rate": 2.960687993425004e-06, + "loss": 1.2848, + "step": 889 + }, + { + "epoch": 2.77, + "learning_rate": 2.8815940192183033e-06, + "loss": 1.3639, + "step": 890 + }, + { + "epoch": 2.77, + "learning_rate": 2.803555440167427e-06, + "loss": 1.3454, + "step": 891 + }, + { + "epoch": 2.77, + "learning_rate": 2.7265731043251807e-06, + "loss": 1.317, + "step": 892 + }, + { + "epoch": 2.78, + "learning_rate": 2.6506478482661077e-06, + "loss": 1.3105, + "step": 893 + }, + { + "epoch": 2.78, + "learning_rate": 2.575780497077307e-06, + "loss": 1.2993, + "step": 894 + }, + { + "epoch": 2.78, + "learning_rate": 2.501971864349606e-06, + "loss": 1.3332, + "step": 895 + }, + { + "epoch": 2.79, + "learning_rate": 2.429222752168547e-06, + "loss": 1.3281, + "step": 896 + }, + { + "epoch": 2.79, + "learning_rate": 2.357533951105839e-06, + "loss": 1.3206, + "step": 897 + }, + { + "epoch": 2.79, + "learning_rate": 2.28690624021064e-06, + "loss": 1.3227, + "step": 898 + }, + { + "epoch": 2.8, + "learning_rate": 2.217340387001121e-06, + "loss": 1.3449, + "step": 899 + }, + { + "epoch": 2.8, + "learning_rate": 2.1488371474562063e-06, + "loss": 1.3016, + "step": 900 + }, + { + "epoch": 2.8, + "eval_loss": 1.6737442016601562, + "eval_runtime": 233.8114, + "eval_samples_per_second": 16.351, + "eval_steps_per_second": 4.089, + "step": 900 + }, + { + "epoch": 2.8, + "learning_rate": 2.081397266007223e-06, + "loss": 1.3462, + "step": 901 + }, + { + "epoch": 2.81, + "learning_rate": 2.0150214755299435e-06, + "loss": 1.3186, + "step": 902 + }, + { + "epoch": 2.81, + "learning_rate": 1.949710497336532e-06, + "loss": 1.3164, + "step": 903 + }, + { + "epoch": 2.81, + "learning_rate": 1.885465041167711e-06, + "loss": 1.3049, + "step": 904 + }, + { + "epoch": 2.81, + "learning_rate": 1.822285805185142e-06, + "loss": 1.3177, + "step": 905 + }, + { + "epoch": 2.82, + "learning_rate": 1.7601734759636668e-06, + "loss": 1.307, + "step": 906 + }, + { + "epoch": 2.82, + "learning_rate": 1.699128728484034e-06, + "loss": 1.3226, + "step": 907 + }, + { + "epoch": 2.82, + "learning_rate": 1.6391522261254511e-06, + "loss": 1.2788, + "step": 908 + }, + { + "epoch": 2.83, + "learning_rate": 1.5802446206583665e-06, + "loss": 1.2765, + "step": 909 + }, + { + "epoch": 2.83, + "learning_rate": 1.5224065522374986e-06, + "loss": 1.2689, + "step": 910 + }, + { + "epoch": 2.83, + "learning_rate": 1.4656386493947293e-06, + "loss": 1.2668, + "step": 911 + }, + { + "epoch": 2.84, + "learning_rate": 1.409941529032377e-06, + "loss": 1.315, + "step": 912 + }, + { + "epoch": 2.84, + "learning_rate": 1.3553157964164676e-06, + "loss": 1.3632, + "step": 913 + }, + { + "epoch": 2.84, + "learning_rate": 1.3017620451701406e-06, + "loss": 1.356, + "step": 914 + }, + { + "epoch": 2.85, + "learning_rate": 1.249280857267221e-06, + "loss": 1.261, + "step": 915 + }, + { + "epoch": 2.85, + "learning_rate": 1.197872803025879e-06, + "loss": 1.3367, + "step": 916 + }, + { + "epoch": 2.85, + "learning_rate": 1.1475384411024247e-06, + "loss": 1.3263, + "step": 917 + }, + { + "epoch": 2.86, + "learning_rate": 1.0982783184852686e-06, + "loss": 1.3102, + "step": 918 + }, + { + "epoch": 2.86, + "learning_rate": 1.0500929704889585e-06, + "loss": 1.3241, + "step": 919 + }, + { + 
"epoch": 2.86, + "learning_rate": 1.002982920748341e-06, + "loss": 1.2908, + "step": 920 + }, + { + "epoch": 2.86, + "learning_rate": 9.569486812129102e-07, + "loss": 1.2669, + "step": 921 + }, + { + "epoch": 2.87, + "learning_rate": 9.119907521412119e-07, + "loss": 1.2809, + "step": 922 + }, + { + "epoch": 2.87, + "learning_rate": 8.681096220954477e-07, + "loss": 1.3074, + "step": 923 + }, + { + "epoch": 2.87, + "learning_rate": 8.253057679361021e-07, + "loss": 1.2716, + "step": 924 + }, + { + "epoch": 2.88, + "learning_rate": 7.835796548168351e-07, + "loss": 1.2728, + "step": 925 + }, + { + "epoch": 2.88, + "learning_rate": 7.429317361793641e-07, + "loss": 1.3208, + "step": 926 + }, + { + "epoch": 2.88, + "learning_rate": 7.033624537485683e-07, + "loss": 1.3164, + "step": 927 + }, + { + "epoch": 2.89, + "learning_rate": 6.648722375277028e-07, + "loss": 1.274, + "step": 928 + }, + { + "epoch": 2.89, + "learning_rate": 6.274615057936584e-07, + "loss": 1.2541, + "step": 929 + }, + { + "epoch": 2.89, + "learning_rate": 5.911306650925208e-07, + "loss": 1.326, + "step": 930 + }, + { + "epoch": 2.9, + "learning_rate": 5.55880110235052e-07, + "loss": 1.3214, + "step": 931 + }, + { + "epoch": 2.9, + "learning_rate": 5.21710224292471e-07, + "loss": 1.2606, + "step": 932 + }, + { + "epoch": 2.9, + "learning_rate": 4.886213785922245e-07, + "loss": 1.2989, + "step": 933 + }, + { + "epoch": 2.91, + "learning_rate": 4.5661393271402287e-07, + "loss": 1.3629, + "step": 934 + }, + { + "epoch": 2.91, + "learning_rate": 4.2568823448591034e-07, + "loss": 1.3304, + "step": 935 + }, + { + "epoch": 2.91, + "learning_rate": 3.9584461998043445e-07, + "loss": 1.339, + "step": 936 + }, + { + "epoch": 2.91, + "learning_rate": 3.670834135110601e-07, + "loss": 1.2753, + "step": 937 + }, + { + "epoch": 2.92, + "learning_rate": 3.39404927628606e-07, + "loss": 1.3083, + "step": 938 + }, + { + "epoch": 2.92, + "learning_rate": 3.128094631178913e-07, + "loss": 1.3116, + "step": 939 + }, + { + "epoch": 2.92, + "learning_rate": 2.8729730899438313e-07, + "loss": 1.2998, + "step": 940 + }, + { + "epoch": 2.93, + "learning_rate": 2.6286874250116553e-07, + "loss": 1.3133, + "step": 941 + }, + { + "epoch": 2.93, + "learning_rate": 2.3952402910581985e-07, + "loss": 1.3089, + "step": 942 + }, + { + "epoch": 2.93, + "learning_rate": 2.1726342249763776e-07, + "loss": 1.259, + "step": 943 + }, + { + "epoch": 2.94, + "learning_rate": 1.960871645847795e-07, + "loss": 1.3196, + "step": 944 + }, + { + "epoch": 2.94, + "learning_rate": 1.7599548549170897e-07, + "loss": 1.3302, + "step": 945 + }, + { + "epoch": 2.94, + "learning_rate": 1.5698860355665146e-07, + "loss": 1.2932, + "step": 946 + }, + { + "epoch": 2.95, + "learning_rate": 1.390667253292288e-07, + "loss": 1.3472, + "step": 947 + }, + { + "epoch": 2.95, + "learning_rate": 1.2223004556825014e-07, + "loss": 1.3076, + "step": 948 + }, + { + "epoch": 2.95, + "learning_rate": 1.0647874723954676e-07, + "loss": 1.3167, + "step": 949 + }, + { + "epoch": 2.95, + "learning_rate": 9.181300151399618e-08, + "loss": 1.3018, + "step": 950 + }, + { + "epoch": 2.95, + "eval_loss": 1.673557162284851, + "eval_runtime": 233.9095, + "eval_samples_per_second": 16.344, + "eval_steps_per_second": 4.087, + "step": 950 + } + ], + "logging_steps": 1, + "max_steps": 963, + "num_train_epochs": 3, + "save_steps": 50, + "total_flos": 2.664058074641203e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-950/training_args.bin b/checkpoint-950/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..25049b3d1421c700cce988a7b926327f5a7c7a75 --- /dev/null +++ b/checkpoint-950/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f61cafb89242b653e455003b5517e685ecccfa6180af5fb7d0bfb35b4fc77a4 +size 4475 diff --git a/config.json b/config.json new file mode 100644 index 0000000000000000000000000000000000000000..308bea154ae41d99b1a0610a33dd905f376bef60 --- /dev/null +++ b/config.json @@ -0,0 +1,40 @@ +{ + "_name_or_path": "./mistralai_Mistral-7B-v0.1", + "architectures": [ + "MistralForCausalLM" + ], + "attention_bias": false, + "bos_token_id": 1, + "eos_token_id": 2, + "hidden_act": "silu", + "hidden_size": 4096, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 32768, + "model_type": "llama", + "num_attention_heads": 32, + "num_hidden_layers": 32, + "num_key_value_heads": 8, + "pretraining_tp": 1, + "quantization_config": { + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "rms_norm_eps": 1e-05, + "rope_scaling": null, + "rope_theta": 10000.0, + "sliding_window": 4096, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.34.0.dev0", + "use_cache": false, + "vocab_size": 32000 +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..9bfed7513d3b1b65445af10c4571256f4a19b290 --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/tokenizer.model b/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..8b443ef19c2a19acc3ac64fb9c3db4a72921dff6 --- /dev/null +++ b/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055 +size 493443 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c06bce08e94887ff980178983d9d31a7a962903 --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,45 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": null, + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "LlamaTokenizer", + "tokenizer_file": "./mistralai_Mistral-7B-v0.1/tokenizer.json", + "trust_remote_code": false, + "unk_token": "", + "use_default_system_prompt": true, + "use_fast": true +}
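
The `trainer_state.json` log above follows the standard Hugging Face `Trainer` schema: one record per optimizer step with `epoch`, `learning_rate`, `loss`, and `step`, plus an extra record every 50 steps carrying `eval_loss`, `eval_runtime`, and throughput figures. Below is a minimal sketch, not part of the repo, for pulling the train and eval curves back out of a checkpoint; the `checkpoint-950/trainer_state.json` path is an assumption about where the file sits in a local clone.

```python
import json

# Assumed local path to one of the checkpoints in this repo.
STATE_PATH = "checkpoint-950/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# "log_history" holds the per-step records: training records carry "loss",
# evaluation records (every 50 steps here) carry "eval_loss" instead.
train_curve = [(r["step"], r["loss"]) for r in state["log_history"] if "loss" in r]
eval_curve = [(r["step"], r["eval_loss"]) for r in state["log_history"] if "eval_loss" in r]

print("best metric:", state.get("best_metric"), "at", state.get("best_model_checkpoint"))
print("last eval points:", eval_curve[-3:])
```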
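
Taken together, these files are what an inference script needs: `config.json` records the 4-bit NF4 bitsandbytes settings used during training, the adapter files at the repo root hold the trained LoRA weights, and `tokenizer.model` / `tokenizer_config.json` ship the base model's Llama-style SentencePiece tokenizer. The sketch below is one plausible way to load everything with `transformers` and `peft`, not a prescribed procedure; the Hub id `mistralai/Mistral-7B-v0.1` and the local adapter path `./lora-out` are assumptions standing in for the `./mistralai_Mistral-7B-v0.1` path baked into the configs.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
from peft import PeftModel

BASE_MODEL = "mistralai/Mistral-7B-v0.1"  # assumption: stands in for ./mistralai_Mistral-7B-v0.1
ADAPTER_PATH = "./lora-out"               # assumption: local clone of this adapter repo

# Mirror the bitsandbytes settings recorded in config.json:
# 4-bit NF4, double quantization, bfloat16 compute.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

# The repo ships the same Llama tokenizer files as the base model,
# so loading the tokenizer from the base checkpoint is equivalent.
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    quantization_config=bnb_config,
    device_map="auto",
)
model = PeftModel.from_pretrained(model, ADAPTER_PATH)  # attaches the LoRA adapter weights
model.eval()

inputs = tokenizer("The quick brown fox", return_tensors="pt").to(model.device)
with torch.no_grad():
    out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```

Keeping the adapter separate from the quantized base weights lets one base model serve several adapters; if a single merged checkpoint is preferred, `PeftModel.merge_and_unload()` can be applied after reloading the base model in full precision.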