Mousaicv committed on
Commit
8f6f1b7
1 Parent(s): 4bd664a

selfrag zephyr-7b-sft-lora

README.md CHANGED
@@ -1,3 +1,64 @@
  ---
- license: apache-2.0
+ base_model: mrs-7b
+ tags:
+ - alignment-handbook
+ - generated_from_trainer
+ datasets:
+ - gpt4_reward_with_format
+ model-index:
+ - name: zephyr-7b-sft-lora
+   results: []
  ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # zephyr-7b-sft-lora
+
+ This model is a fine-tuned version of [mrs-7b](https://huggingface.co/mrs-7b) on the gpt4_reward_with_format dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.0911
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2e-05
+ - train_batch_size: 3
+ - eval_batch_size: 2
+ - seed: 42
+ - distributed_type: multi-GPU
+ - gradient_accumulation_steps: 64
+ - total_train_batch_size: 192
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - num_epochs: 3
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:-----:|:----:|:---------------:|
+ | 0.1001 | 1.0 | 219 | 0.1006 |
+ | 0.0969 | 2.0 | 439 | 0.0930 |
+ | 0.0795 | 2.99 | 657 | 0.0911 |
+
+
+ ### Framework versions
+
+ - Transformers 4.35.0
+ - Pytorch 2.1.1+cu121
+ - Datasets 2.14.6
+ - Tokenizers 0.14.1
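
For reference, a minimal sketch of how an adapter like this is typically loaded for inference with `transformers` and `peft` (not part of the commit; the base-model id comes from the card, and `adapter_dir` is a placeholder for a local copy of these files):

```python
# Hypothetical usage sketch -- illustrates loading, not the authors' own code.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "mrs-7b"                    # base_model named in the model card
adapter_dir = "./zephyr-7b-sft-lora"  # placeholder: local copy of this repository

tokenizer = AutoTokenizer.from_pretrained(adapter_dir)
base = AutoModelForCausalLM.from_pretrained(
    base_id, torch_dtype=torch.bfloat16, device_map="auto"  # device_map needs accelerate
)
model = PeftModel.from_pretrained(base, adapter_dir)  # reads adapter_config.json + adapter_model.safetensors
# model = model.merge_and_unload()  # optional: fold the LoRA weights into the base model
```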
adapter_config.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "mrs-7b",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "lora_alpha": 16,
+   "lora_dropout": 0.1,
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 64,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "v_proj",
+     "q_proj",
+     "k_proj",
+     "o_proj"
+   ],
+   "task_type": "CAUSAL_LM"
+ }
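
The adapter settings above correspond to the following `peft.LoraConfig`; this is a reconstruction for illustration, since the training script itself is not included in this commit:

```python
# Sketch: LoraConfig equivalent to adapter_config.json above.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,                 # LoRA rank
    lora_alpha=16,        # scaling factor (alpha / r = 0.25)
    lora_dropout=0.1,
    bias="none",
    target_modules=["v_proj", "q_proj", "k_proj", "o_proj"],  # attention projections only
    task_type="CAUSAL_LM",
)
```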
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:331f28c831a5ddd16c304b42d71ea032cd36799231ff9b2a2bb2ebe22e5d58a8
+ size 218138576
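
This is a Git LFS pointer, so only the hash and byte count of `adapter_model.safetensors` appear in the diff. The size is consistent with the configs above, assuming LoRA A/B matrices on the q/k/v/o projections of all 32 layers stored as 32-bit floats (a back-of-the-envelope check, not a statement from the repo):

```python
# Rough size check from config.json (hidden_size 4096, 32 layers, 8 KV heads of dim 128)
# and adapter_config.json (r=64, target q/k/v/o). Assumes fp32 storage of the adapter.
hidden, kv_dim, layers, r = 4096, 8 * (4096 // 32), 32, 64
per_layer = 2 * (r * hidden + hidden * r)   # q_proj and o_proj: 4096 -> 4096
per_layer += 2 * (r * hidden + kv_dim * r)  # k_proj and v_proj: 4096 -> 1024
params = layers * per_layer                 # 54,525,952 LoRA parameters
print(params, params * 4)                   # ~218,103,808 bytes, close to the 218,138,576-byte file
```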
all_results.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "epoch": 2.99,
+   "eval_loss": 0.09112608432769775,
+   "eval_runtime": 1063.6125,
+   "eval_samples": 4693,
+   "eval_samples_per_second": 4.412,
+   "eval_steps_per_second": 2.207,
+   "train_loss": 0.11520915337848155,
+   "train_runtime": 83607.727,
+   "train_samples": 42230,
+   "train_samples_per_second": 1.515,
+   "train_steps_per_second": 0.008
+ }
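
As a sanity check, the throughput figures are mutually consistent if one assumes the Trainer's usual conventions (train samples/s = dataset size x configured epochs / runtime; eval steps = ceil(samples / eval_batch_size)); this is an inference about how the numbers were computed, not something stated in the files:

```python
import math

train_samples, num_epochs, train_runtime = 42230, 3, 83607.727
eval_samples, eval_bs, eval_runtime = 4693, 2, 1063.6125
global_step = 657

print(round(train_samples * num_epochs / train_runtime, 3))        # 1.515 samples/s
print(round(global_step / train_runtime, 3))                       # 0.008 steps/s
print(round(eval_samples / eval_runtime, 3))                       # 4.412 samples/s
print(round(math.ceil(eval_samples / eval_bs) / eval_runtime, 3))  # 2.207 steps/s
```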
config.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "_name_or_path": "mrs-7b",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "quantization_config": {
+     "bnb_4bit_compute_dtype": "float16",
+     "bnb_4bit_quant_type": "nf4",
+     "bnb_4bit_use_double_quant": false,
+     "llm_int8_enable_fp32_cpu_offload": false,
+     "llm_int8_has_fp16_weight": false,
+     "llm_int8_skip_modules": null,
+     "llm_int8_threshold": 6.0,
+     "load_in_4bit": true,
+     "load_in_8bit": false,
+     "quant_method": "bitsandbytes"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 10000.0,
+   "sliding_window": 4096,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.35.0",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
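
The `quantization_config` block records that the base weights were loaded in 4-bit NF4 via bitsandbytes. A sketch of the equivalent `transformers.BitsAndBytesConfig` (illustrative; the original loading code is not part of this commit):

```python
# Sketch: BitsAndBytesConfig matching the quantization_config stored in config.json.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=False,
)
# model = AutoModelForCausalLM.from_pretrained("mrs-7b", quantization_config=bnb_config, device_map="auto")
```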
eval_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 2.99,
+   "eval_loss": 0.09112608432769775,
+   "eval_runtime": 1063.6125,
+   "eval_samples": 4693,
+   "eval_samples_per_second": 4.412,
+   "eval_steps_per_second": 2.207
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "</s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,41 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 2048,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": true
+ }
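
The `chat_template` entry is the Zephyr-style format: each turn is wrapped in a `<|system|>`, `<|user|>`, or `<|assistant|>` header and terminated with `</s>`. A small sketch of applying it (the path is a placeholder):

```python
# Sketch: render a conversation with the template stored in tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./zephyr-7b-sft-lora")  # placeholder local path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What does this adapter do?"},
]
prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
print(prompt)  # <|system|> ... </s> <|user|> ... </s> <|assistant|>  (newlines per the template)
```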
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 2.99,
+   "train_loss": 0.11520915337848155,
+   "train_runtime": 83607.727,
+   "train_samples": 42230,
+   "train_samples_per_second": 1.515,
+   "train_steps_per_second": 0.008
+ }
trainer_state.json ADDED
@@ -0,0 +1,3994 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 2.9870000710378632,
5
+ "eval_steps": 500,
6
+ "global_step": 657,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0,
13
+ "learning_rate": 1.9999885675796825e-05,
14
+ "loss": 1.2537,
15
+ "step": 1
16
+ },
17
+ {
18
+ "epoch": 0.01,
19
+ "learning_rate": 1.9999542705801295e-05,
20
+ "loss": 1.1439,
21
+ "step": 2
22
+ },
23
+ {
24
+ "epoch": 0.01,
25
+ "learning_rate": 1.9998971097855372e-05,
26
+ "loss": 0.9631,
27
+ "step": 3
28
+ },
29
+ {
30
+ "epoch": 0.02,
31
+ "learning_rate": 1.9998170865028774e-05,
32
+ "loss": 0.9216,
33
+ "step": 4
34
+ },
35
+ {
36
+ "epoch": 0.02,
37
+ "learning_rate": 1.99971420256187e-05,
38
+ "loss": 0.771,
39
+ "step": 5
40
+ },
41
+ {
42
+ "epoch": 0.03,
43
+ "learning_rate": 1.9995884603149403e-05,
44
+ "loss": 0.7876,
45
+ "step": 6
46
+ },
47
+ {
48
+ "epoch": 0.03,
49
+ "learning_rate": 1.9994398626371643e-05,
50
+ "loss": 0.5925,
51
+ "step": 7
52
+ },
53
+ {
54
+ "epoch": 0.04,
55
+ "learning_rate": 1.9992684129262038e-05,
56
+ "loss": 0.5688,
57
+ "step": 8
58
+ },
59
+ {
60
+ "epoch": 0.04,
61
+ "learning_rate": 1.9990741151022302e-05,
62
+ "loss": 0.5276,
63
+ "step": 9
64
+ },
65
+ {
66
+ "epoch": 0.05,
67
+ "learning_rate": 1.9988569736078315e-05,
68
+ "loss": 0.4483,
69
+ "step": 10
70
+ },
71
+ {
72
+ "epoch": 0.05,
73
+ "learning_rate": 1.9986169934079135e-05,
74
+ "loss": 0.4298,
75
+ "step": 11
76
+ },
77
+ {
78
+ "epoch": 0.05,
79
+ "learning_rate": 1.998354179989585e-05,
80
+ "loss": 0.3731,
81
+ "step": 12
82
+ },
83
+ {
84
+ "epoch": 0.06,
85
+ "learning_rate": 1.998068539362034e-05,
86
+ "loss": 0.4158,
87
+ "step": 13
88
+ },
89
+ {
90
+ "epoch": 0.06,
91
+ "learning_rate": 1.9977600780563863e-05,
92
+ "loss": 0.3014,
93
+ "step": 14
94
+ },
95
+ {
96
+ "epoch": 0.07,
97
+ "learning_rate": 1.997428803125562e-05,
98
+ "loss": 0.311,
99
+ "step": 15
100
+ },
101
+ {
102
+ "epoch": 0.07,
103
+ "learning_rate": 1.9970747221441084e-05,
104
+ "loss": 0.2518,
105
+ "step": 16
106
+ },
107
+ {
108
+ "epoch": 0.08,
109
+ "learning_rate": 1.9966978432080316e-05,
110
+ "loss": 0.216,
111
+ "step": 17
112
+ },
113
+ {
114
+ "epoch": 0.08,
115
+ "learning_rate": 1.996298174934608e-05,
116
+ "loss": 0.2503,
117
+ "step": 18
118
+ },
119
+ {
120
+ "epoch": 0.09,
121
+ "learning_rate": 1.995875726462189e-05,
122
+ "loss": 0.255,
123
+ "step": 19
124
+ },
125
+ {
126
+ "epoch": 0.09,
127
+ "learning_rate": 1.9954305074499916e-05,
128
+ "loss": 0.2365,
129
+ "step": 20
130
+ },
131
+ {
132
+ "epoch": 0.1,
133
+ "learning_rate": 1.994962528077878e-05,
134
+ "loss": 0.2665,
135
+ "step": 21
136
+ },
137
+ {
138
+ "epoch": 0.1,
139
+ "learning_rate": 1.9944717990461208e-05,
140
+ "loss": 0.2059,
141
+ "step": 22
142
+ },
143
+ {
144
+ "epoch": 0.1,
145
+ "learning_rate": 1.9939583315751624e-05,
146
+ "loss": 0.257,
147
+ "step": 23
148
+ },
149
+ {
150
+ "epoch": 0.11,
151
+ "learning_rate": 1.9934221374053538e-05,
152
+ "loss": 0.2509,
153
+ "step": 24
154
+ },
155
+ {
156
+ "epoch": 0.11,
157
+ "learning_rate": 1.99286322879669e-05,
158
+ "loss": 0.2405,
159
+ "step": 25
160
+ },
161
+ {
162
+ "epoch": 0.12,
163
+ "learning_rate": 1.9922816185285264e-05,
164
+ "loss": 0.2793,
165
+ "step": 26
166
+ },
167
+ {
168
+ "epoch": 0.12,
169
+ "learning_rate": 1.99167731989929e-05,
170
+ "loss": 0.2513,
171
+ "step": 27
172
+ },
173
+ {
174
+ "epoch": 0.13,
175
+ "learning_rate": 1.9910503467261724e-05,
176
+ "loss": 0.2923,
177
+ "step": 28
178
+ },
179
+ {
180
+ "epoch": 0.13,
181
+ "learning_rate": 1.9904007133448147e-05,
182
+ "loss": 0.258,
183
+ "step": 29
184
+ },
185
+ {
186
+ "epoch": 0.14,
187
+ "learning_rate": 1.989728434608981e-05,
188
+ "loss": 0.3037,
189
+ "step": 30
190
+ },
191
+ {
192
+ "epoch": 0.14,
193
+ "learning_rate": 1.9890335258902177e-05,
194
+ "loss": 0.1534,
195
+ "step": 31
196
+ },
197
+ {
198
+ "epoch": 0.15,
199
+ "learning_rate": 1.9883160030775018e-05,
200
+ "loss": 0.1594,
201
+ "step": 32
202
+ },
203
+ {
204
+ "epoch": 0.15,
205
+ "learning_rate": 1.987575882576878e-05,
206
+ "loss": 0.2293,
207
+ "step": 33
208
+ },
209
+ {
210
+ "epoch": 0.15,
211
+ "learning_rate": 1.9868131813110835e-05,
212
+ "loss": 0.2364,
213
+ "step": 34
214
+ },
215
+ {
216
+ "epoch": 0.16,
217
+ "learning_rate": 1.986027916719161e-05,
218
+ "loss": 0.1932,
219
+ "step": 35
220
+ },
221
+ {
222
+ "epoch": 0.16,
223
+ "learning_rate": 1.9852201067560607e-05,
224
+ "loss": 0.2004,
225
+ "step": 36
226
+ },
227
+ {
228
+ "epoch": 0.17,
229
+ "learning_rate": 1.9843897698922284e-05,
230
+ "loss": 0.1773,
231
+ "step": 37
232
+ },
233
+ {
234
+ "epoch": 0.17,
235
+ "learning_rate": 1.9835369251131847e-05,
236
+ "loss": 0.2215,
237
+ "step": 38
238
+ },
239
+ {
240
+ "epoch": 0.18,
241
+ "learning_rate": 1.9826615919190886e-05,
242
+ "loss": 0.1996,
243
+ "step": 39
244
+ },
245
+ {
246
+ "epoch": 0.18,
247
+ "learning_rate": 1.981763790324295e-05,
248
+ "loss": 0.1933,
249
+ "step": 40
250
+ },
251
+ {
252
+ "epoch": 0.19,
253
+ "learning_rate": 1.9808435408568938e-05,
254
+ "loss": 0.19,
255
+ "step": 41
256
+ },
257
+ {
258
+ "epoch": 0.19,
259
+ "learning_rate": 1.9799008645582424e-05,
260
+ "loss": 0.2077,
261
+ "step": 42
262
+ },
263
+ {
264
+ "epoch": 0.2,
265
+ "learning_rate": 1.9789357829824842e-05,
266
+ "loss": 0.1683,
267
+ "step": 43
268
+ },
269
+ {
270
+ "epoch": 0.2,
271
+ "learning_rate": 1.9779483181960556e-05,
272
+ "loss": 0.173,
273
+ "step": 44
274
+ },
275
+ {
276
+ "epoch": 0.2,
277
+ "learning_rate": 1.976938492777182e-05,
278
+ "loss": 0.1938,
279
+ "step": 45
280
+ },
281
+ {
282
+ "epoch": 0.21,
283
+ "learning_rate": 1.97590632981536e-05,
284
+ "loss": 0.1655,
285
+ "step": 46
286
+ },
287
+ {
288
+ "epoch": 0.21,
289
+ "learning_rate": 1.9748518529108317e-05,
290
+ "loss": 0.1899,
291
+ "step": 47
292
+ },
293
+ {
294
+ "epoch": 0.22,
295
+ "learning_rate": 1.9737750861740434e-05,
296
+ "loss": 0.2034,
297
+ "step": 48
298
+ },
299
+ {
300
+ "epoch": 0.22,
301
+ "learning_rate": 1.9726760542250946e-05,
302
+ "loss": 0.176,
303
+ "step": 49
304
+ },
305
+ {
306
+ "epoch": 0.23,
307
+ "learning_rate": 1.971554782193176e-05,
308
+ "loss": 0.125,
309
+ "step": 50
310
+ },
311
+ {
312
+ "epoch": 0.23,
313
+ "learning_rate": 1.970411295715994e-05,
314
+ "loss": 0.1539,
315
+ "step": 51
316
+ },
317
+ {
318
+ "epoch": 0.24,
319
+ "learning_rate": 1.9692456209391845e-05,
320
+ "loss": 0.1794,
321
+ "step": 52
322
+ },
323
+ {
324
+ "epoch": 0.24,
325
+ "learning_rate": 1.9680577845157155e-05,
326
+ "loss": 0.1544,
327
+ "step": 53
328
+ },
329
+ {
330
+ "epoch": 0.25,
331
+ "learning_rate": 1.9668478136052776e-05,
332
+ "loss": 0.1948,
333
+ "step": 54
334
+ },
335
+ {
336
+ "epoch": 0.25,
337
+ "learning_rate": 1.9656157358736626e-05,
338
+ "loss": 0.1566,
339
+ "step": 55
340
+ },
341
+ {
342
+ "epoch": 0.25,
343
+ "learning_rate": 1.964361579492132e-05,
344
+ "loss": 0.1895,
345
+ "step": 56
346
+ },
347
+ {
348
+ "epoch": 0.26,
349
+ "learning_rate": 1.9630853731367715e-05,
350
+ "loss": 0.1456,
351
+ "step": 57
352
+ },
353
+ {
354
+ "epoch": 0.26,
355
+ "learning_rate": 1.961787145987835e-05,
356
+ "loss": 0.1425,
357
+ "step": 58
358
+ },
359
+ {
360
+ "epoch": 0.27,
361
+ "learning_rate": 1.9604669277290805e-05,
362
+ "loss": 0.1649,
363
+ "step": 59
364
+ },
365
+ {
366
+ "epoch": 0.27,
367
+ "learning_rate": 1.959124748547088e-05,
368
+ "loss": 0.1266,
369
+ "step": 60
370
+ },
371
+ {
372
+ "epoch": 0.28,
373
+ "learning_rate": 1.9577606391305705e-05,
374
+ "loss": 0.1112,
375
+ "step": 61
376
+ },
377
+ {
378
+ "epoch": 0.28,
379
+ "learning_rate": 1.956374630669672e-05,
380
+ "loss": 0.2214,
381
+ "step": 62
382
+ },
383
+ {
384
+ "epoch": 0.29,
385
+ "learning_rate": 1.9549667548552557e-05,
386
+ "loss": 0.1718,
387
+ "step": 63
388
+ },
389
+ {
390
+ "epoch": 0.29,
391
+ "learning_rate": 1.9535370438781766e-05,
392
+ "loss": 0.1172,
393
+ "step": 64
394
+ },
395
+ {
396
+ "epoch": 0.3,
397
+ "learning_rate": 1.95208553042855e-05,
398
+ "loss": 0.1136,
399
+ "step": 65
400
+ },
401
+ {
402
+ "epoch": 0.3,
403
+ "learning_rate": 1.950612247694998e-05,
404
+ "loss": 0.1382,
405
+ "step": 66
406
+ },
407
+ {
408
+ "epoch": 0.3,
409
+ "learning_rate": 1.9491172293638968e-05,
410
+ "loss": 0.1534,
411
+ "step": 67
412
+ },
413
+ {
414
+ "epoch": 0.31,
415
+ "learning_rate": 1.9476005096186017e-05,
416
+ "loss": 0.1223,
417
+ "step": 68
418
+ },
419
+ {
420
+ "epoch": 0.31,
421
+ "learning_rate": 1.946062123138668e-05,
422
+ "loss": 0.1115,
423
+ "step": 69
424
+ },
425
+ {
426
+ "epoch": 0.32,
427
+ "learning_rate": 1.9445021050990572e-05,
428
+ "loss": 0.144,
429
+ "step": 70
430
+ },
431
+ {
432
+ "epoch": 0.32,
433
+ "learning_rate": 1.9429204911693333e-05,
434
+ "loss": 0.1439,
435
+ "step": 71
436
+ },
437
+ {
438
+ "epoch": 0.33,
439
+ "learning_rate": 1.9413173175128472e-05,
440
+ "loss": 0.1694,
441
+ "step": 72
442
+ },
443
+ {
444
+ "epoch": 0.33,
445
+ "learning_rate": 1.9396926207859085e-05,
446
+ "loss": 0.1303,
447
+ "step": 73
448
+ },
449
+ {
450
+ "epoch": 0.34,
451
+ "learning_rate": 1.9380464381369493e-05,
452
+ "loss": 0.1308,
453
+ "step": 74
454
+ },
455
+ {
456
+ "epoch": 0.34,
457
+ "learning_rate": 1.936378807205673e-05,
458
+ "loss": 0.1064,
459
+ "step": 75
460
+ },
461
+ {
462
+ "epoch": 0.35,
463
+ "learning_rate": 1.9346897661221957e-05,
464
+ "loss": 0.1334,
465
+ "step": 76
466
+ },
467
+ {
468
+ "epoch": 0.35,
469
+ "learning_rate": 1.9329793535061724e-05,
470
+ "loss": 0.1245,
471
+ "step": 77
472
+ },
473
+ {
474
+ "epoch": 0.35,
475
+ "learning_rate": 1.931247608465915e-05,
476
+ "loss": 0.1271,
477
+ "step": 78
478
+ },
479
+ {
480
+ "epoch": 0.36,
481
+ "learning_rate": 1.9294945705974975e-05,
482
+ "loss": 0.1131,
483
+ "step": 79
484
+ },
485
+ {
486
+ "epoch": 0.36,
487
+ "learning_rate": 1.927720279983852e-05,
488
+ "loss": 0.1519,
489
+ "step": 80
490
+ },
491
+ {
492
+ "epoch": 0.37,
493
+ "learning_rate": 1.92592477719385e-05,
494
+ "loss": 0.1757,
495
+ "step": 81
496
+ },
497
+ {
498
+ "epoch": 0.37,
499
+ "learning_rate": 1.924108103281377e-05,
500
+ "loss": 0.1478,
501
+ "step": 82
502
+ },
503
+ {
504
+ "epoch": 0.38,
505
+ "learning_rate": 1.9222702997843928e-05,
506
+ "loss": 0.1007,
507
+ "step": 83
508
+ },
509
+ {
510
+ "epoch": 0.38,
511
+ "learning_rate": 1.9204114087239806e-05,
512
+ "loss": 0.1531,
513
+ "step": 84
514
+ },
515
+ {
516
+ "epoch": 0.39,
517
+ "learning_rate": 1.9185314726033893e-05,
518
+ "loss": 0.0998,
519
+ "step": 85
520
+ },
521
+ {
522
+ "epoch": 0.39,
523
+ "learning_rate": 1.916630534407058e-05,
524
+ "loss": 0.1236,
525
+ "step": 86
526
+ },
527
+ {
528
+ "epoch": 0.4,
529
+ "learning_rate": 1.914708637599636e-05,
530
+ "loss": 0.1078,
531
+ "step": 87
532
+ },
533
+ {
534
+ "epoch": 0.4,
535
+ "learning_rate": 1.9127658261249872e-05,
536
+ "loss": 0.1361,
537
+ "step": 88
538
+ },
539
+ {
540
+ "epoch": 0.4,
541
+ "learning_rate": 1.910802144405186e-05,
542
+ "loss": 0.1348,
543
+ "step": 89
544
+ },
545
+ {
546
+ "epoch": 0.41,
547
+ "learning_rate": 1.908817637339503e-05,
548
+ "loss": 0.1374,
549
+ "step": 90
550
+ },
551
+ {
552
+ "epoch": 0.41,
553
+ "learning_rate": 1.9068123503033752e-05,
554
+ "loss": 0.1645,
555
+ "step": 91
556
+ },
557
+ {
558
+ "epoch": 0.42,
559
+ "learning_rate": 1.9047863291473717e-05,
560
+ "loss": 0.1,
561
+ "step": 92
562
+ },
563
+ {
564
+ "epoch": 0.42,
565
+ "learning_rate": 1.902739620196143e-05,
566
+ "loss": 0.1438,
567
+ "step": 93
568
+ },
569
+ {
570
+ "epoch": 0.43,
571
+ "learning_rate": 1.900672270247363e-05,
572
+ "loss": 0.1164,
573
+ "step": 94
574
+ },
575
+ {
576
+ "epoch": 0.43,
577
+ "learning_rate": 1.898584326570659e-05,
578
+ "loss": 0.1417,
579
+ "step": 95
580
+ },
581
+ {
582
+ "epoch": 0.44,
583
+ "learning_rate": 1.8964758369065303e-05,
584
+ "loss": 0.1174,
585
+ "step": 96
586
+ },
587
+ {
588
+ "epoch": 0.44,
589
+ "learning_rate": 1.894346849465257e-05,
590
+ "loss": 0.0978,
591
+ "step": 97
592
+ },
593
+ {
594
+ "epoch": 0.45,
595
+ "learning_rate": 1.892197412925798e-05,
596
+ "loss": 0.1157,
597
+ "step": 98
598
+ },
599
+ {
600
+ "epoch": 0.45,
601
+ "learning_rate": 1.890027576434677e-05,
602
+ "loss": 0.108,
603
+ "step": 99
604
+ },
605
+ {
606
+ "epoch": 0.45,
607
+ "learning_rate": 1.8878373896048594e-05,
608
+ "loss": 0.1282,
609
+ "step": 100
610
+ },
611
+ {
612
+ "epoch": 0.46,
613
+ "learning_rate": 1.8856269025146182e-05,
614
+ "loss": 0.0976,
615
+ "step": 101
616
+ },
617
+ {
618
+ "epoch": 0.46,
619
+ "learning_rate": 1.8833961657063887e-05,
620
+ "loss": 0.1377,
621
+ "step": 102
622
+ },
623
+ {
624
+ "epoch": 0.47,
625
+ "learning_rate": 1.881145230185612e-05,
626
+ "loss": 0.1597,
627
+ "step": 103
628
+ },
629
+ {
630
+ "epoch": 0.47,
631
+ "learning_rate": 1.8788741474195706e-05,
632
+ "loss": 0.1306,
633
+ "step": 104
634
+ },
635
+ {
636
+ "epoch": 0.48,
637
+ "learning_rate": 1.8765829693362097e-05,
638
+ "loss": 0.1166,
639
+ "step": 105
640
+ },
641
+ {
642
+ "epoch": 0.48,
643
+ "learning_rate": 1.874271748322951e-05,
644
+ "loss": 0.1265,
645
+ "step": 106
646
+ },
647
+ {
648
+ "epoch": 0.49,
649
+ "learning_rate": 1.8719405372254947e-05,
650
+ "loss": 0.1464,
651
+ "step": 107
652
+ },
653
+ {
654
+ "epoch": 0.49,
655
+ "learning_rate": 1.869589389346611e-05,
656
+ "loss": 0.1258,
657
+ "step": 108
658
+ },
659
+ {
660
+ "epoch": 0.5,
661
+ "learning_rate": 1.8672183584449217e-05,
662
+ "loss": 0.1114,
663
+ "step": 109
664
+ },
665
+ {
666
+ "epoch": 0.5,
667
+ "learning_rate": 1.8648274987336706e-05,
668
+ "loss": 0.1234,
669
+ "step": 110
670
+ },
671
+ {
672
+ "epoch": 0.5,
673
+ "learning_rate": 1.8624168648794833e-05,
674
+ "loss": 0.1266,
675
+ "step": 111
676
+ },
677
+ {
678
+ "epoch": 0.51,
679
+ "learning_rate": 1.859986512001119e-05,
680
+ "loss": 0.1162,
681
+ "step": 112
682
+ },
683
+ {
684
+ "epoch": 0.51,
685
+ "learning_rate": 1.8575364956682097e-05,
686
+ "loss": 0.1484,
687
+ "step": 113
688
+ },
689
+ {
690
+ "epoch": 0.52,
691
+ "learning_rate": 1.8550668718999873e-05,
692
+ "loss": 0.127,
693
+ "step": 114
694
+ },
695
+ {
696
+ "epoch": 0.52,
697
+ "learning_rate": 1.8525776971640065e-05,
698
+ "loss": 0.1425,
699
+ "step": 115
700
+ },
701
+ {
702
+ "epoch": 0.53,
703
+ "learning_rate": 1.8500690283748502e-05,
704
+ "loss": 0.1416,
705
+ "step": 116
706
+ },
707
+ {
708
+ "epoch": 0.53,
709
+ "learning_rate": 1.8475409228928314e-05,
710
+ "loss": 0.1261,
711
+ "step": 117
712
+ },
713
+ {
714
+ "epoch": 0.54,
715
+ "learning_rate": 1.8449934385226784e-05,
716
+ "loss": 0.1208,
717
+ "step": 118
718
+ },
719
+ {
720
+ "epoch": 0.54,
721
+ "learning_rate": 1.842426633512215e-05,
722
+ "loss": 0.1326,
723
+ "step": 119
724
+ },
725
+ {
726
+ "epoch": 0.55,
727
+ "learning_rate": 1.83984056655103e-05,
728
+ "loss": 0.1141,
729
+ "step": 120
730
+ },
731
+ {
732
+ "epoch": 0.55,
733
+ "learning_rate": 1.837235296769131e-05,
734
+ "loss": 0.1275,
735
+ "step": 121
736
+ },
737
+ {
738
+ "epoch": 0.55,
739
+ "learning_rate": 1.834610883735597e-05,
740
+ "loss": 0.1379,
741
+ "step": 122
742
+ },
743
+ {
744
+ "epoch": 0.56,
745
+ "learning_rate": 1.831967387457214e-05,
746
+ "loss": 0.0989,
747
+ "step": 123
748
+ },
749
+ {
750
+ "epoch": 0.56,
751
+ "learning_rate": 1.8293048683771023e-05,
752
+ "loss": 0.1165,
753
+ "step": 124
754
+ },
755
+ {
756
+ "epoch": 0.57,
757
+ "learning_rate": 1.8266233873733376e-05,
758
+ "loss": 0.1221,
759
+ "step": 125
760
+ },
761
+ {
762
+ "epoch": 0.57,
763
+ "learning_rate": 1.8239230057575542e-05,
764
+ "loss": 0.0941,
765
+ "step": 126
766
+ },
767
+ {
768
+ "epoch": 0.58,
769
+ "learning_rate": 1.8212037852735487e-05,
770
+ "loss": 0.1147,
771
+ "step": 127
772
+ },
773
+ {
774
+ "epoch": 0.58,
775
+ "learning_rate": 1.8184657880958637e-05,
776
+ "loss": 0.1012,
777
+ "step": 128
778
+ },
779
+ {
780
+ "epoch": 0.59,
781
+ "learning_rate": 1.815709076828368e-05,
782
+ "loss": 0.0999,
783
+ "step": 129
784
+ },
785
+ {
786
+ "epoch": 0.59,
787
+ "learning_rate": 1.8129337145028257e-05,
788
+ "loss": 0.1083,
789
+ "step": 130
790
+ },
791
+ {
792
+ "epoch": 0.6,
793
+ "learning_rate": 1.810139764577454e-05,
794
+ "loss": 0.1,
795
+ "step": 131
796
+ },
797
+ {
798
+ "epoch": 0.6,
799
+ "learning_rate": 1.8073272909354727e-05,
800
+ "loss": 0.1283,
801
+ "step": 132
802
+ },
803
+ {
804
+ "epoch": 0.6,
805
+ "learning_rate": 1.8044963578836437e-05,
806
+ "loss": 0.1194,
807
+ "step": 133
808
+ },
809
+ {
810
+ "epoch": 0.61,
811
+ "learning_rate": 1.8016470301507995e-05,
812
+ "loss": 0.1098,
813
+ "step": 134
814
+ },
815
+ {
816
+ "epoch": 0.61,
817
+ "learning_rate": 1.798779372886365e-05,
818
+ "loss": 0.1099,
819
+ "step": 135
820
+ },
821
+ {
822
+ "epoch": 0.62,
823
+ "learning_rate": 1.7958934516588665e-05,
824
+ "loss": 0.0984,
825
+ "step": 136
826
+ },
827
+ {
828
+ "epoch": 0.62,
829
+ "learning_rate": 1.7929893324544333e-05,
830
+ "loss": 0.1062,
831
+ "step": 137
832
+ },
833
+ {
834
+ "epoch": 0.63,
835
+ "learning_rate": 1.7900670816752875e-05,
836
+ "loss": 0.1054,
837
+ "step": 138
838
+ },
839
+ {
840
+ "epoch": 0.63,
841
+ "learning_rate": 1.7871267661382278e-05,
842
+ "loss": 0.0968,
843
+ "step": 139
844
+ },
845
+ {
846
+ "epoch": 0.64,
847
+ "learning_rate": 1.7841684530731006e-05,
848
+ "loss": 0.0918,
849
+ "step": 140
850
+ },
851
+ {
852
+ "epoch": 0.64,
853
+ "learning_rate": 1.7811922101212622e-05,
854
+ "loss": 0.1491,
855
+ "step": 141
856
+ },
857
+ {
858
+ "epoch": 0.65,
859
+ "learning_rate": 1.778198105334034e-05,
860
+ "loss": 0.0946,
861
+ "step": 142
862
+ },
863
+ {
864
+ "epoch": 0.65,
865
+ "learning_rate": 1.775186207171144e-05,
866
+ "loss": 0.093,
867
+ "step": 143
868
+ },
869
+ {
870
+ "epoch": 0.65,
871
+ "learning_rate": 1.7721565844991643e-05,
872
+ "loss": 0.1105,
873
+ "step": 144
874
+ },
875
+ {
876
+ "epoch": 0.66,
877
+ "learning_rate": 1.7691093065899344e-05,
878
+ "loss": 0.095,
879
+ "step": 145
880
+ },
881
+ {
882
+ "epoch": 0.66,
883
+ "learning_rate": 1.766044443118978e-05,
884
+ "loss": 0.1255,
885
+ "step": 146
886
+ },
887
+ {
888
+ "epoch": 0.67,
889
+ "learning_rate": 1.7629620641639102e-05,
890
+ "loss": 0.0982,
891
+ "step": 147
892
+ },
893
+ {
894
+ "epoch": 0.67,
895
+ "learning_rate": 1.7598622402028344e-05,
896
+ "loss": 0.1035,
897
+ "step": 148
898
+ },
899
+ {
900
+ "epoch": 0.68,
901
+ "learning_rate": 1.756745042112731e-05,
902
+ "loss": 0.0932,
903
+ "step": 149
904
+ },
905
+ {
906
+ "epoch": 0.68,
907
+ "learning_rate": 1.753610541167838e-05,
908
+ "loss": 0.0954,
909
+ "step": 150
910
+ },
911
+ {
912
+ "epoch": 0.69,
913
+ "learning_rate": 1.75045880903802e-05,
914
+ "loss": 0.1083,
915
+ "step": 151
916
+ },
917
+ {
918
+ "epoch": 0.69,
919
+ "learning_rate": 1.74728991778713e-05,
920
+ "loss": 0.1025,
921
+ "step": 152
922
+ },
923
+ {
924
+ "epoch": 0.7,
925
+ "learning_rate": 1.744103939871361e-05,
926
+ "loss": 0.1337,
927
+ "step": 153
928
+ },
929
+ {
930
+ "epoch": 0.7,
931
+ "learning_rate": 1.7409009481375905e-05,
932
+ "loss": 0.1083,
933
+ "step": 154
934
+ },
935
+ {
936
+ "epoch": 0.7,
937
+ "learning_rate": 1.7376810158217142e-05,
938
+ "loss": 0.1003,
939
+ "step": 155
940
+ },
941
+ {
942
+ "epoch": 0.71,
943
+ "learning_rate": 1.7344442165469714e-05,
944
+ "loss": 0.1212,
945
+ "step": 156
946
+ },
947
+ {
948
+ "epoch": 0.71,
949
+ "learning_rate": 1.7311906243222613e-05,
950
+ "loss": 0.1267,
951
+ "step": 157
952
+ },
953
+ {
954
+ "epoch": 0.72,
955
+ "learning_rate": 1.7279203135404522e-05,
956
+ "loss": 0.1225,
957
+ "step": 158
958
+ },
959
+ {
960
+ "epoch": 0.72,
961
+ "learning_rate": 1.7246333589766786e-05,
962
+ "loss": 0.0981,
963
+ "step": 159
964
+ },
965
+ {
966
+ "epoch": 0.73,
967
+ "learning_rate": 1.7213298357866326e-05,
968
+ "loss": 0.1342,
969
+ "step": 160
970
+ },
971
+ {
972
+ "epoch": 0.73,
973
+ "learning_rate": 1.7180098195048458e-05,
974
+ "loss": 0.1031,
975
+ "step": 161
976
+ },
977
+ {
978
+ "epoch": 0.74,
979
+ "learning_rate": 1.7146733860429614e-05,
980
+ "loss": 0.1232,
981
+ "step": 162
982
+ },
983
+ {
984
+ "epoch": 0.74,
985
+ "learning_rate": 1.7113206116879983e-05,
986
+ "loss": 0.1193,
987
+ "step": 163
988
+ },
989
+ {
990
+ "epoch": 0.75,
991
+ "learning_rate": 1.7079515731006085e-05,
992
+ "loss": 0.0976,
993
+ "step": 164
994
+ },
995
+ {
996
+ "epoch": 0.75,
997
+ "learning_rate": 1.7045663473133215e-05,
998
+ "loss": 0.1021,
999
+ "step": 165
1000
+ },
1001
+ {
1002
+ "epoch": 0.75,
1003
+ "learning_rate": 1.7011650117287868e-05,
1004
+ "loss": 0.1116,
1005
+ "step": 166
1006
+ },
1007
+ {
1008
+ "epoch": 0.76,
1009
+ "learning_rate": 1.6977476441179993e-05,
1010
+ "loss": 0.1164,
1011
+ "step": 167
1012
+ },
1013
+ {
1014
+ "epoch": 0.76,
1015
+ "learning_rate": 1.6943143226185252e-05,
1016
+ "loss": 0.1145,
1017
+ "step": 168
1018
+ },
1019
+ {
1020
+ "epoch": 0.77,
1021
+ "learning_rate": 1.6908651257327138e-05,
1022
+ "loss": 0.0901,
1023
+ "step": 169
1024
+ },
1025
+ {
1026
+ "epoch": 0.77,
1027
+ "learning_rate": 1.6874001323259012e-05,
1028
+ "loss": 0.0792,
1029
+ "step": 170
1030
+ },
1031
+ {
1032
+ "epoch": 0.78,
1033
+ "learning_rate": 1.683919421624611e-05,
1034
+ "loss": 0.0992,
1035
+ "step": 171
1036
+ },
1037
+ {
1038
+ "epoch": 0.78,
1039
+ "learning_rate": 1.680423073214737e-05,
1040
+ "loss": 0.107,
1041
+ "step": 172
1042
+ },
1043
+ {
1044
+ "epoch": 0.79,
1045
+ "learning_rate": 1.67691116703973e-05,
1046
+ "loss": 0.1006,
1047
+ "step": 173
1048
+ },
1049
+ {
1050
+ "epoch": 0.79,
1051
+ "learning_rate": 1.6733837833987634e-05,
1052
+ "loss": 0.0988,
1053
+ "step": 174
1054
+ },
1055
+ {
1056
+ "epoch": 0.8,
1057
+ "learning_rate": 1.669841002944903e-05,
1058
+ "loss": 0.1153,
1059
+ "step": 175
1060
+ },
1061
+ {
1062
+ "epoch": 0.8,
1063
+ "learning_rate": 1.6662829066832595e-05,
1064
+ "loss": 0.1173,
1065
+ "step": 176
1066
+ },
1067
+ {
1068
+ "epoch": 0.8,
1069
+ "learning_rate": 1.6627095759691364e-05,
1070
+ "loss": 0.1,
1071
+ "step": 177
1072
+ },
1073
+ {
1074
+ "epoch": 0.81,
1075
+ "learning_rate": 1.659121092506171e-05,
1076
+ "loss": 0.1007,
1077
+ "step": 178
1078
+ },
1079
+ {
1080
+ "epoch": 0.81,
1081
+ "learning_rate": 1.6555175383444658e-05,
1082
+ "loss": 0.1124,
1083
+ "step": 179
1084
+ },
1085
+ {
1086
+ "epoch": 0.82,
1087
+ "learning_rate": 1.6518989958787126e-05,
1088
+ "loss": 0.1206,
1089
+ "step": 180
1090
+ },
1091
+ {
1092
+ "epoch": 0.82,
1093
+ "learning_rate": 1.648265547846308e-05,
1094
+ "loss": 0.0997,
1095
+ "step": 181
1096
+ },
1097
+ {
1098
+ "epoch": 0.83,
1099
+ "learning_rate": 1.6446172773254628e-05,
1100
+ "loss": 0.0865,
1101
+ "step": 182
1102
+ },
1103
+ {
1104
+ "epoch": 0.83,
1105
+ "learning_rate": 1.6409542677333007e-05,
1106
+ "loss": 0.1221,
1107
+ "step": 183
1108
+ },
1109
+ {
1110
+ "epoch": 0.84,
1111
+ "learning_rate": 1.6372766028239523e-05,
1112
+ "loss": 0.0963,
1113
+ "step": 184
1114
+ },
1115
+ {
1116
+ "epoch": 0.84,
1117
+ "learning_rate": 1.6335843666866388e-05,
1118
+ "loss": 0.1199,
1119
+ "step": 185
1120
+ },
1121
+ {
1122
+ "epoch": 0.85,
1123
+ "learning_rate": 1.6298776437437526e-05,
1124
+ "loss": 0.1115,
1125
+ "step": 186
1126
+ },
1127
+ {
1128
+ "epoch": 0.85,
1129
+ "learning_rate": 1.626156518748922e-05,
1130
+ "loss": 0.1016,
1131
+ "step": 187
1132
+ },
1133
+ {
1134
+ "epoch": 0.85,
1135
+ "learning_rate": 1.6224210767850773e-05,
1136
+ "loss": 0.1037,
1137
+ "step": 188
1138
+ },
1139
+ {
1140
+ "epoch": 0.86,
1141
+ "learning_rate": 1.6186714032625036e-05,
1142
+ "loss": 0.1056,
1143
+ "step": 189
1144
+ },
1145
+ {
1146
+ "epoch": 0.86,
1147
+ "learning_rate": 1.6149075839168886e-05,
1148
+ "loss": 0.1125,
1149
+ "step": 190
1150
+ },
1151
+ {
1152
+ "epoch": 0.87,
1153
+ "learning_rate": 1.611129704807362e-05,
1154
+ "loss": 0.0969,
1155
+ "step": 191
1156
+ },
1157
+ {
1158
+ "epoch": 0.87,
1159
+ "learning_rate": 1.6073378523145272e-05,
1160
+ "loss": 0.1198,
1161
+ "step": 192
1162
+ },
1163
+ {
1164
+ "epoch": 0.88,
1165
+ "learning_rate": 1.6035321131384872e-05,
1166
+ "loss": 0.1097,
1167
+ "step": 193
1168
+ },
1169
+ {
1170
+ "epoch": 0.88,
1171
+ "learning_rate": 1.599712574296862e-05,
1172
+ "loss": 0.0976,
1173
+ "step": 194
1174
+ },
1175
+ {
1176
+ "epoch": 0.89,
1177
+ "learning_rate": 1.595879323122798e-05,
1178
+ "loss": 0.0937,
1179
+ "step": 195
1180
+ },
1181
+ {
1182
+ "epoch": 0.89,
1183
+ "learning_rate": 1.592032447262973e-05,
1184
+ "loss": 0.1166,
1185
+ "step": 196
1186
+ },
1187
+ {
1188
+ "epoch": 0.9,
1189
+ "learning_rate": 1.5881720346755904e-05,
1190
+ "loss": 0.0905,
1191
+ "step": 197
1192
+ },
1193
+ {
1194
+ "epoch": 0.9,
1195
+ "learning_rate": 1.5842981736283686e-05,
1196
+ "loss": 0.0879,
1197
+ "step": 198
1198
+ },
1199
+ {
1200
+ "epoch": 0.9,
1201
+ "learning_rate": 1.5804109526965232e-05,
1202
+ "loss": 0.127,
1203
+ "step": 199
1204
+ },
1205
+ {
1206
+ "epoch": 0.91,
1207
+ "learning_rate": 1.576510460760741e-05,
1208
+ "loss": 0.1016,
1209
+ "step": 200
1210
+ },
1211
+ {
1212
+ "epoch": 0.91,
1213
+ "learning_rate": 1.572596787005149e-05,
1214
+ "loss": 0.0996,
1215
+ "step": 201
1216
+ },
1217
+ {
1218
+ "epoch": 0.92,
1219
+ "learning_rate": 1.568670020915274e-05,
1220
+ "loss": 0.1011,
1221
+ "step": 202
1222
+ },
1223
+ {
1224
+ "epoch": 0.92,
1225
+ "learning_rate": 1.564730252275996e-05,
1226
+ "loss": 0.089,
1227
+ "step": 203
1228
+ },
1229
+ {
1230
+ "epoch": 0.93,
1231
+ "learning_rate": 1.560777571169498e-05,
1232
+ "loss": 0.1112,
1233
+ "step": 204
1234
+ },
1235
+ {
1236
+ "epoch": 0.93,
1237
+ "learning_rate": 1.556812067973203e-05,
1238
+ "loss": 0.1025,
1239
+ "step": 205
1240
+ },
1241
+ {
1242
+ "epoch": 0.94,
1243
+ "learning_rate": 1.55283383335771e-05,
1244
+ "loss": 0.1064,
1245
+ "step": 206
1246
+ },
1247
+ {
1248
+ "epoch": 0.94,
1249
+ "learning_rate": 1.5488429582847194e-05,
1250
+ "loss": 0.0977,
1251
+ "step": 207
1252
+ },
1253
+ {
1254
+ "epoch": 0.95,
1255
+ "learning_rate": 1.5448395340049538e-05,
1256
+ "loss": 0.0963,
1257
+ "step": 208
1258
+ },
1259
+ {
1260
+ "epoch": 0.95,
1261
+ "learning_rate": 1.5408236520560707e-05,
1262
+ "loss": 0.089,
1263
+ "step": 209
1264
+ },
1265
+ {
1266
+ "epoch": 0.95,
1267
+ "learning_rate": 1.536795404260572e-05,
1268
+ "loss": 0.1107,
1269
+ "step": 210
1270
+ },
1271
+ {
1272
+ "epoch": 0.96,
1273
+ "learning_rate": 1.5327548827237008e-05,
1274
+ "loss": 0.1046,
1275
+ "step": 211
1276
+ },
1277
+ {
1278
+ "epoch": 0.96,
1279
+ "learning_rate": 1.528702179831338e-05,
1280
+ "loss": 0.0926,
1281
+ "step": 212
1282
+ },
1283
+ {
1284
+ "epoch": 0.97,
1285
+ "learning_rate": 1.5246373882478899e-05,
1286
+ "loss": 0.1009,
1287
+ "step": 213
1288
+ },
1289
+ {
1290
+ "epoch": 0.97,
1291
+ "learning_rate": 1.5205606009141683e-05,
1292
+ "loss": 0.0965,
1293
+ "step": 214
1294
+ },
1295
+ {
1296
+ "epoch": 0.98,
1297
+ "learning_rate": 1.5164719110452652e-05,
1298
+ "loss": 0.1004,
1299
+ "step": 215
1300
+ },
1301
+ {
1302
+ "epoch": 0.98,
1303
+ "learning_rate": 1.512371412128424e-05,
1304
+ "loss": 0.0771,
1305
+ "step": 216
1306
+ },
1307
+ {
1308
+ "epoch": 0.99,
1309
+ "learning_rate": 1.5082591979208977e-05,
1310
+ "loss": 0.0901,
1311
+ "step": 217
1312
+ },
1313
+ {
1314
+ "epoch": 0.99,
1315
+ "learning_rate": 1.5041353624478094e-05,
1316
+ "loss": 0.0993,
1317
+ "step": 218
1318
+ },
1319
+ {
1320
+ "epoch": 1.0,
1321
+ "learning_rate": 1.5000000000000002e-05,
1322
+ "loss": 0.1001,
1323
+ "step": 219
1324
+ },
1325
+ {
1326
+ "epoch": 1.0,
1327
+ "eval_loss": 0.10058634728193283,
1328
+ "eval_runtime": 1063.6983,
1329
+ "eval_samples_per_second": 4.412,
1330
+ "eval_steps_per_second": 2.206,
1331
+ "step": 219
1332
+ },
1333
+ {
1334
+ "epoch": 1.0,
1335
+ "learning_rate": 1.4958532051318731e-05,
1336
+ "loss": 0.091,
1337
+ "step": 220
1338
+ },
1339
+ {
1340
+ "epoch": 1.0,
1341
+ "learning_rate": 1.4916950726592322e-05,
1342
+ "loss": 0.0958,
1343
+ "step": 221
1344
+ },
1345
+ {
1346
+ "epoch": 1.01,
1347
+ "learning_rate": 1.4875256976571135e-05,
1348
+ "loss": 0.0909,
1349
+ "step": 222
1350
+ },
1351
+ {
1352
+ "epoch": 1.01,
1353
+ "learning_rate": 1.4833451754576122e-05,
1354
+ "loss": 0.0759,
1355
+ "step": 223
1356
+ },
1357
+ {
1358
+ "epoch": 1.02,
1359
+ "learning_rate": 1.4791536016477021e-05,
1360
+ "loss": 0.0987,
1361
+ "step": 224
1362
+ },
1363
+ {
1364
+ "epoch": 1.02,
1365
+ "learning_rate": 1.4749510720670506e-05,
1366
+ "loss": 0.0773,
1367
+ "step": 225
1368
+ },
1369
+ {
1370
+ "epoch": 1.03,
1371
+ "learning_rate": 1.4707376828058264e-05,
1372
+ "loss": 0.0942,
1373
+ "step": 226
1374
+ },
1375
+ {
1376
+ "epoch": 1.03,
1377
+ "learning_rate": 1.4665135302025036e-05,
1378
+ "loss": 0.081,
1379
+ "step": 227
1380
+ },
1381
+ {
1382
+ "epoch": 1.04,
1383
+ "learning_rate": 1.4622787108416585e-05,
1384
+ "loss": 0.0899,
1385
+ "step": 228
1386
+ },
1387
+ {
1388
+ "epoch": 1.04,
1389
+ "learning_rate": 1.4580333215517608e-05,
1390
+ "loss": 0.0945,
1391
+ "step": 229
1392
+ },
1393
+ {
1394
+ "epoch": 1.05,
1395
+ "learning_rate": 1.45377745940296e-05,
1396
+ "loss": 0.0787,
1397
+ "step": 230
1398
+ },
1399
+ {
1400
+ "epoch": 1.05,
1401
+ "learning_rate": 1.449511221704866e-05,
1402
+ "loss": 0.0965,
1403
+ "step": 231
1404
+ },
1405
+ {
1406
+ "epoch": 1.05,
1407
+ "learning_rate": 1.4452347060043239e-05,
1408
+ "loss": 0.0831,
1409
+ "step": 232
1410
+ },
1411
+ {
1412
+ "epoch": 1.06,
1413
+ "learning_rate": 1.4409480100831834e-05,
1414
+ "loss": 0.094,
1415
+ "step": 233
1416
+ },
1417
+ {
1418
+ "epoch": 1.06,
1419
+ "learning_rate": 1.4366512319560642e-05,
1420
+ "loss": 0.1029,
1421
+ "step": 234
1422
+ },
1423
+ {
1424
+ "epoch": 1.07,
1425
+ "learning_rate": 1.4323444698681126e-05,
1426
+ "loss": 0.0995,
1427
+ "step": 235
1428
+ },
1429
+ {
1430
+ "epoch": 1.07,
1431
+ "learning_rate": 1.428027822292758e-05,
1432
+ "loss": 0.0839,
1433
+ "step": 236
1434
+ },
1435
+ {
1436
+ "epoch": 1.08,
1437
+ "learning_rate": 1.423701387929459e-05,
1438
+ "loss": 0.0919,
1439
+ "step": 237
1440
+ },
1441
+ {
1442
+ "epoch": 1.08,
1443
+ "learning_rate": 1.419365265701448e-05,
1444
+ "loss": 0.091,
1445
+ "step": 238
1446
+ },
1447
+ {
1448
+ "epoch": 1.09,
1449
+ "learning_rate": 1.4150195547534686e-05,
1450
+ "loss": 0.1023,
1451
+ "step": 239
1452
+ },
1453
+ {
1454
+ "epoch": 1.09,
1455
+ "learning_rate": 1.4106643544495092e-05,
1456
+ "loss": 0.0821,
1457
+ "step": 240
1458
+ },
1459
+ {
1460
+ "epoch": 1.1,
1461
+ "learning_rate": 1.4062997643705308e-05,
1462
+ "loss": 0.0897,
1463
+ "step": 241
1464
+ },
1465
+ {
1466
+ "epoch": 1.1,
1467
+ "learning_rate": 1.4019258843121893e-05,
1468
+ "loss": 0.106,
1469
+ "step": 242
1470
+ },
1471
+ {
1472
+ "epoch": 1.1,
1473
+ "learning_rate": 1.3975428142825562e-05,
1474
+ "loss": 0.0902,
1475
+ "step": 243
1476
+ },
1477
+ {
1478
+ "epoch": 1.11,
1479
+ "learning_rate": 1.3931506544998283e-05,
1480
+ "loss": 0.0952,
1481
+ "step": 244
1482
+ },
1483
+ {
1484
+ "epoch": 1.11,
1485
+ "learning_rate": 1.3887495053900398e-05,
1486
+ "loss": 0.1105,
1487
+ "step": 245
1488
+ },
1489
+ {
1490
+ "epoch": 1.12,
1491
+ "learning_rate": 1.3843394675847635e-05,
1492
+ "loss": 0.0892,
1493
+ "step": 246
1494
+ },
1495
+ {
1496
+ "epoch": 1.12,
1497
+ "learning_rate": 1.3799206419188104e-05,
1498
+ "loss": 0.0953,
1499
+ "step": 247
1500
+ },
1501
+ {
1502
+ "epoch": 1.13,
1503
+ "learning_rate": 1.3754931294279264e-05,
1504
+ "loss": 0.0891,
1505
+ "step": 248
1506
+ },
1507
+ {
1508
+ "epoch": 1.13,
1509
+ "learning_rate": 1.3710570313464778e-05,
1510
+ "loss": 0.0972,
1511
+ "step": 249
1512
+ },
1513
+ {
1514
+ "epoch": 1.14,
1515
+ "learning_rate": 1.3666124491051408e-05,
1516
+ "loss": 0.1029,
1517
+ "step": 250
1518
+ },
1519
+ {
1520
+ "epoch": 1.14,
1521
+ "learning_rate": 1.3621594843285801e-05,
1522
+ "loss": 0.0975,
1523
+ "step": 251
1524
+ },
1525
+ {
1526
+ "epoch": 1.15,
1527
+ "learning_rate": 1.3576982388331258e-05,
1528
+ "loss": 0.0863,
1529
+ "step": 252
1530
+ },
1531
+ {
1532
+ "epoch": 1.15,
1533
+ "learning_rate": 1.3532288146244446e-05,
1534
+ "loss": 0.0879,
1535
+ "step": 253
1536
+ },
1537
+ {
1538
+ "epoch": 1.15,
1539
+ "learning_rate": 1.3487513138952092e-05,
1540
+ "loss": 0.0879,
1541
+ "step": 254
1542
+ },
1543
+ {
1544
+ "epoch": 1.16,
1545
+ "learning_rate": 1.3442658390227604e-05,
1546
+ "loss": 0.1132,
1547
+ "step": 255
1548
+ },
1549
+ {
1550
+ "epoch": 1.16,
1551
+ "learning_rate": 1.3397724925667657e-05,
1552
+ "loss": 0.0877,
1553
+ "step": 256
1554
+ },
1555
+ {
1556
+ "epoch": 1.17,
1557
+ "learning_rate": 1.3352713772668766e-05,
1558
+ "loss": 0.0793,
1559
+ "step": 257
1560
+ },
1561
+ {
1562
+ "epoch": 1.17,
1563
+ "learning_rate": 1.3307625960403763e-05,
1564
+ "loss": 0.0964,
1565
+ "step": 258
1566
+ },
1567
+ {
1568
+ "epoch": 1.18,
1569
+ "learning_rate": 1.3262462519798294e-05,
1570
+ "loss": 0.1016,
1571
+ "step": 259
1572
+ },
1573
+ {
1574
+ "epoch": 1.18,
1575
+ "learning_rate": 1.321722448350723e-05,
1576
+ "loss": 0.0865,
1577
+ "step": 260
1578
+ },
1579
+ {
1580
+ "epoch": 1.19,
1581
+ "learning_rate": 1.3171912885891063e-05,
1582
+ "loss": 0.0847,
1583
+ "step": 261
1584
+ },
1585
+ {
1586
+ "epoch": 1.19,
1587
+ "learning_rate": 1.3126528762992248e-05,
1588
+ "loss": 0.0942,
1589
+ "step": 262
1590
+ },
1591
+ {
1592
+ "epoch": 1.2,
1593
+ "learning_rate": 1.3081073152511525e-05,
1594
+ "loss": 0.0873,
1595
+ "step": 263
1596
+ },
1597
+ {
1598
+ "epoch": 1.2,
1599
+ "learning_rate": 1.3035547093784187e-05,
1600
+ "loss": 0.1064,
1601
+ "step": 264
1602
+ },
1603
+ {
1604
+ "epoch": 1.2,
1605
+ "learning_rate": 1.2989951627756306e-05,
1606
+ "loss": 0.0818,
1607
+ "step": 265
1608
+ },
1609
+ {
1610
+ "epoch": 1.21,
1611
+ "learning_rate": 1.2944287796960949e-05,
1612
+ "loss": 0.0976,
1613
+ "step": 266
1614
+ },
1615
+ {
1616
+ "epoch": 1.21,
1617
+ "learning_rate": 1.2898556645494327e-05,
1618
+ "loss": 0.0864,
1619
+ "step": 267
1620
+ },
1621
+ {
1622
+ "epoch": 1.22,
1623
+ "learning_rate": 1.2852759218991935e-05,
1624
+ "loss": 0.0918,
1625
+ "step": 268
1626
+ },
1627
+ {
1628
+ "epoch": 1.22,
1629
+ "learning_rate": 1.2806896564604627e-05,
1630
+ "loss": 0.0966,
1631
+ "step": 269
1632
+ },
1633
+ {
1634
+ "epoch": 1.23,
1635
+ "learning_rate": 1.2760969730974692e-05,
1636
+ "loss": 0.0872,
1637
+ "step": 270
1638
+ },
1639
+ {
1640
+ "epoch": 1.23,
1641
+ "learning_rate": 1.2714979768211854e-05,
1642
+ "loss": 0.0814,
1643
+ "step": 271
1644
+ },
1645
+ {
1646
+ "epoch": 1.24,
1647
+ "learning_rate": 1.2668927727869292e-05,
1648
+ "loss": 0.0898,
1649
+ "step": 272
1650
+ },
1651
+ {
1652
+ "epoch": 1.24,
1653
+ "learning_rate": 1.2622814662919562e-05,
1654
+ "loss": 0.0823,
1655
+ "step": 273
1656
+ },
1657
+ {
1658
+ "epoch": 1.25,
1659
+ "learning_rate": 1.2576641627730548e-05,
1660
+ "loss": 0.0926,
1661
+ "step": 274
1662
+ },
1663
+ {
1664
+ "epoch": 1.25,
1665
+ "learning_rate": 1.2530409678041342e-05,
1666
+ "loss": 0.0844,
1667
+ "step": 275
1668
+ },
1669
+ {
1670
+ "epoch": 1.25,
1671
+ "learning_rate": 1.2484119870938102e-05,
1672
+ "loss": 0.0949,
1673
+ "step": 276
1674
+ },
1675
+ {
1676
+ "epoch": 1.26,
1677
+ "learning_rate": 1.2437773264829898e-05,
1678
+ "loss": 0.087,
1679
+ "step": 277
1680
+ },
1681
+ {
1682
+ "epoch": 1.26,
1683
+ "learning_rate": 1.2391370919424485e-05,
1684
+ "loss": 0.0862,
1685
+ "step": 278
1686
+ },
1687
+ {
1688
+ "epoch": 1.27,
1689
+ "learning_rate": 1.2344913895704099e-05,
1690
+ "loss": 0.1098,
1691
+ "step": 279
1692
+ },
1693
+ {
1694
+ "epoch": 1.27,
1695
+ "learning_rate": 1.2298403255901185e-05,
1696
+ "loss": 0.0773,
1697
+ "step": 280
1698
+ },
1699
+ {
1700
+ "epoch": 1.28,
1701
+ "learning_rate": 1.2251840063474108e-05,
1702
+ "loss": 0.0907,
1703
+ "step": 281
1704
+ },
1705
+ {
1706
+ "epoch": 1.28,
1707
+ "learning_rate": 1.2205225383082844e-05,
1708
+ "loss": 0.1035,
1709
+ "step": 282
1710
+ },
1711
+ {
1712
+ "epoch": 1.29,
1713
+ "learning_rate": 1.2158560280564627e-05,
1714
+ "loss": 0.0833,
1715
+ "step": 283
1716
+ },
1717
+ {
1718
+ "epoch": 1.29,
1719
+ "learning_rate": 1.2111845822909596e-05,
1720
+ "loss": 0.1147,
1721
+ "step": 284
1722
+ },
1723
+ {
1724
+ "epoch": 1.3,
1725
+ "learning_rate": 1.2065083078236375e-05,
1726
+ "loss": 0.0753,
1727
+ "step": 285
1728
+ },
1729
+ {
1730
+ "epoch": 1.3,
1731
+ "learning_rate": 1.2018273115767673e-05,
1732
+ "loss": 0.1022,
1733
+ "step": 286
1734
+ },
1735
+ {
1736
+ "epoch": 1.3,
1737
+ "learning_rate": 1.1971417005805818e-05,
1738
+ "loss": 0.0887,
1739
+ "step": 287
1740
+ },
1741
+ {
1742
+ "epoch": 1.31,
1743
+ "learning_rate": 1.19245158197083e-05,
1744
+ "loss": 0.0874,
1745
+ "step": 288
1746
+ },
1747
+ {
1748
+ "epoch": 1.31,
1749
+ "learning_rate": 1.1877570629863267e-05,
1750
+ "loss": 0.0867,
1751
+ "step": 289
1752
+ },
1753
+ {
1754
+ "epoch": 1.32,
1755
+ "learning_rate": 1.1830582509664997e-05,
1756
+ "loss": 0.0775,
1757
+ "step": 290
1758
+ },
1759
+ {
1760
+ "epoch": 1.32,
1761
+ "learning_rate": 1.1783552533489372e-05,
1762
+ "loss": 0.0743,
1763
+ "step": 291
1764
+ },
1765
+ {
1766
+ "epoch": 1.33,
1767
+ "learning_rate": 1.1736481776669307e-05,
1768
+ "loss": 0.1133,
1769
+ "step": 292
1770
+ },
1771
+ {
1772
+ "epoch": 1.33,
1773
+ "learning_rate": 1.1689371315470151e-05,
1774
+ "loss": 0.0895,
1775
+ "step": 293
1776
+ },
1777
+ {
1778
+ "epoch": 1.34,
1779
+ "learning_rate": 1.164222222706509e-05,
1780
+ "loss": 0.0853,
1781
+ "step": 294
1782
+ },
1783
+ {
1784
+ "epoch": 1.34,
1785
+ "learning_rate": 1.1595035589510522e-05,
1786
+ "loss": 0.1079,
1787
+ "step": 295
1788
+ },
1789
+ {
1790
+ "epoch": 1.35,
1791
+ "learning_rate": 1.1547812481721387e-05,
1792
+ "loss": 0.098,
1793
+ "step": 296
1794
+ },
1795
+ {
1796
+ "epoch": 1.35,
1797
+ "learning_rate": 1.1500553983446527e-05,
1798
+ "loss": 0.0954,
1799
+ "step": 297
1800
+ },
1801
+ {
1802
+ "epoch": 1.35,
1803
+ "learning_rate": 1.1453261175243972e-05,
1804
+ "loss": 0.0821,
1805
+ "step": 298
1806
+ },
1807
+ {
1808
+ "epoch": 1.36,
1809
+ "learning_rate": 1.140593513845624e-05,
1810
+ "loss": 0.0968,
1811
+ "step": 299
1812
+ },
1813
+ {
1814
+ "epoch": 1.36,
1815
+ "learning_rate": 1.135857695518563e-05,
1816
+ "loss": 0.0868,
1817
+ "step": 300
1818
+ },
1819
+ {
1820
+ "epoch": 1.37,
1821
+ "learning_rate": 1.1311187708269442e-05,
1822
+ "loss": 0.0861,
1823
+ "step": 301
1824
+ },
1825
+ {
1826
+ "epoch": 1.37,
1827
+ "learning_rate": 1.1263768481255264e-05,
1828
+ "loss": 0.0734,
1829
+ "step": 302
1830
+ },
1831
+ {
1832
+ "epoch": 1.38,
1833
+ "learning_rate": 1.1216320358376158e-05,
1834
+ "loss": 0.1116,
1835
+ "step": 303
1836
+ },
1837
+ {
1838
+ "epoch": 1.38,
1839
+ "learning_rate": 1.1168844424525902e-05,
1840
+ "loss": 0.0836,
1841
+ "step": 304
1842
+ },
1843
+ {
1844
+ "epoch": 1.39,
1845
+ "learning_rate": 1.1121341765234146e-05,
1846
+ "loss": 0.0913,
1847
+ "step": 305
1848
+ },
1849
+ {
1850
+ "epoch": 1.39,
1851
+ "learning_rate": 1.1073813466641633e-05,
1852
+ "loss": 0.0841,
1853
+ "step": 306
1854
+ },
1855
+ {
1856
+ "epoch": 1.4,
1857
+ "learning_rate": 1.1026260615475333e-05,
1858
+ "loss": 0.094,
1859
+ "step": 307
1860
+ },
1861
+ {
1862
+ "epoch": 1.4,
1863
+ "learning_rate": 1.0978684299023608e-05,
1864
+ "loss": 0.0954,
1865
+ "step": 308
1866
+ },
1867
+ {
1868
+ "epoch": 1.4,
1869
+ "learning_rate": 1.0931085605111354e-05,
1870
+ "loss": 0.0892,
1871
+ "step": 309
1872
+ },
1873
+ {
1874
+ "epoch": 1.41,
1875
+ "learning_rate": 1.088346562207512e-05,
1876
+ "loss": 0.1037,
1877
+ "step": 310
1878
+ },
1879
+ {
1880
+ "epoch": 1.41,
1881
+ "learning_rate": 1.0835825438738232e-05,
1882
+ "loss": 0.0885,
1883
+ "step": 311
1884
+ },
1885
+ {
1886
+ "epoch": 1.42,
1887
+ "learning_rate": 1.0788166144385888e-05,
1888
+ "loss": 0.0872,
1889
+ "step": 312
1890
+ },
1891
+ {
1892
+ "epoch": 1.42,
1893
+ "learning_rate": 1.0740488828740258e-05,
1894
+ "loss": 0.0845,
1895
+ "step": 313
1896
+ },
1897
+ {
1898
+ "epoch": 1.43,
1899
+ "learning_rate": 1.0692794581935566e-05,
1900
+ "loss": 0.1027,
1901
+ "step": 314
1902
+ },
1903
+ {
1904
+ "epoch": 1.43,
1905
+ "learning_rate": 1.0645084494493166e-05,
1906
+ "loss": 0.1171,
1907
+ "step": 315
1908
+ },
1909
+ {
1910
+ "epoch": 1.44,
1911
+ "learning_rate": 1.0597359657296602e-05,
1912
+ "loss": 0.1043,
1913
+ "step": 316
1914
+ },
1915
+ {
1916
+ "epoch": 1.44,
1917
+ "learning_rate": 1.054962116156667e-05,
1918
+ "loss": 0.0894,
1919
+ "step": 317
1920
+ },
1921
+ {
1922
+ "epoch": 1.45,
1923
+ "learning_rate": 1.0501870098836473e-05,
1924
+ "loss": 0.0916,
1925
+ "step": 318
1926
+ },
1927
+ {
1928
+ "epoch": 1.45,
1929
+ "learning_rate": 1.0454107560926444e-05,
1930
+ "loss": 0.0836,
1931
+ "step": 319
1932
+ },
1933
+ {
1934
+ "epoch": 1.45,
1935
+ "learning_rate": 1.0406334639919404e-05,
1936
+ "loss": 0.0926,
1937
+ "step": 320
1938
+ },
1939
+ {
1940
+ "epoch": 1.46,
1941
+ "learning_rate": 1.0358552428135576e-05,
1942
+ "loss": 0.1158,
1943
+ "step": 321
1944
+ },
1945
+ {
1946
+ "epoch": 1.46,
1947
+ "learning_rate": 1.031076201810762e-05,
1948
+ "loss": 0.0906,
1949
+ "step": 322
1950
+ },
1951
+ {
1952
+ "epoch": 1.47,
1953
+ "learning_rate": 1.0262964502555643e-05,
1954
+ "loss": 0.093,
1955
+ "step": 323
1956
+ },
1957
+ {
1958
+ "epoch": 1.47,
1959
+ "learning_rate": 1.0215160974362224e-05,
1960
+ "loss": 0.0844,
1961
+ "step": 324
1962
+ },
1963
+ {
1964
+ "epoch": 1.48,
1965
+ "learning_rate": 1.0167352526547416e-05,
1966
+ "loss": 0.0869,
1967
+ "step": 325
1968
+ },
1969
+ {
1970
+ "epoch": 1.48,
1971
+ "learning_rate": 1.0119540252243755e-05,
1972
+ "loss": 0.0916,
1973
+ "step": 326
1974
+ },
1975
+ {
1976
+ "epoch": 1.49,
1977
+ "learning_rate": 1.0071725244671281e-05,
1978
+ "loss": 0.096,
1979
+ "step": 327
1980
+ },
1981
+ {
1982
+ "epoch": 1.49,
1983
+ "learning_rate": 1.0023908597112514e-05,
1984
+ "loss": 0.0859,
1985
+ "step": 328
1986
+ },
1987
+ {
1988
+ "epoch": 1.5,
1989
+ "learning_rate": 9.976091402887487e-06,
1990
+ "loss": 0.1097,
1991
+ "step": 329
1992
+ },
1993
+ {
1994
+ "epoch": 1.5,
1995
+ "learning_rate": 9.928274755328724e-06,
1996
+ "loss": 0.077,
1997
+ "step": 330
1998
+ },
1999
+ {
2000
+ "epoch": 1.5,
2001
+ "learning_rate": 9.880459747756247e-06,
2002
+ "loss": 0.0881,
2003
+ "step": 331
2004
+ },
2005
+ {
2006
+ "epoch": 1.51,
2007
+ "learning_rate": 9.83264747345259e-06,
2008
+ "loss": 0.0955,
2009
+ "step": 332
2010
+ },
2011
+ {
2012
+ "epoch": 1.51,
2013
+ "learning_rate": 9.78483902563778e-06,
2014
+ "loss": 0.0815,
2015
+ "step": 333
2016
+ },
2017
+ {
2018
+ "epoch": 1.52,
2019
+ "learning_rate": 9.737035497444362e-06,
2020
+ "loss": 0.0778,
2021
+ "step": 334
2022
+ },
2023
+ {
2024
+ "epoch": 1.52,
2025
+ "learning_rate": 9.689237981892381e-06,
2026
+ "loss": 0.0894,
2027
+ "step": 335
2028
+ },
2029
+ {
2030
+ "epoch": 1.53,
2031
+ "learning_rate": 9.641447571864429e-06,
2032
+ "loss": 0.0892,
2033
+ "step": 336
2034
+ },
2035
+ {
2036
+ "epoch": 1.53,
2037
+ "learning_rate": 9.5936653600806e-06,
2038
+ "loss": 0.0925,
2039
+ "step": 337
2040
+ },
2041
+ {
2042
+ "epoch": 1.54,
2043
+ "learning_rate": 9.545892439073561e-06,
2044
+ "loss": 0.1305,
2045
+ "step": 338
2046
+ },
2047
+ {
2048
+ "epoch": 1.54,
2049
+ "learning_rate": 9.49812990116353e-06,
2050
+ "loss": 0.1104,
2051
+ "step": 339
2052
+ },
2053
+ {
2054
+ "epoch": 1.55,
2055
+ "learning_rate": 9.450378838433332e-06,
2056
+ "loss": 0.0779,
2057
+ "step": 340
2058
+ },
2059
+ {
2060
+ "epoch": 1.55,
2061
+ "learning_rate": 9.402640342703401e-06,
2062
+ "loss": 0.0886,
2063
+ "step": 341
2064
+ },
2065
+ {
2066
+ "epoch": 1.55,
2067
+ "learning_rate": 9.354915505506839e-06,
2068
+ "loss": 0.1002,
2069
+ "step": 342
2070
+ },
2071
+ {
2072
+ "epoch": 1.56,
2073
+ "learning_rate": 9.307205418064436e-06,
2074
+ "loss": 0.0833,
2075
+ "step": 343
2076
+ },
2077
+ {
2078
+ "epoch": 1.56,
2079
+ "learning_rate": 9.259511171259747e-06,
2080
+ "loss": 0.0915,
2081
+ "step": 344
2082
+ },
2083
+ {
2084
+ "epoch": 1.57,
2085
+ "learning_rate": 9.211833855614115e-06,
2086
+ "loss": 0.0887,
2087
+ "step": 345
2088
+ },
2089
+ {
2090
+ "epoch": 1.57,
2091
+ "learning_rate": 9.164174561261771e-06,
2092
+ "loss": 0.0946,
2093
+ "step": 346
2094
+ },
2095
+ {
2096
+ "epoch": 1.58,
2097
+ "learning_rate": 9.116534377924882e-06,
2098
+ "loss": 0.0942,
2099
+ "step": 347
2100
+ },
2101
+ {
2102
+ "epoch": 1.58,
2103
+ "learning_rate": 9.068914394888651e-06,
2104
+ "loss": 0.0901,
2105
+ "step": 348
2106
+ },
2107
+ {
2108
+ "epoch": 1.59,
2109
+ "learning_rate": 9.021315700976397e-06,
2110
+ "loss": 0.0811,
2111
+ "step": 349
2112
+ },
2113
+ {
2114
+ "epoch": 1.59,
2115
+ "learning_rate": 8.973739384524674e-06,
2116
+ "loss": 0.0924,
2117
+ "step": 350
2118
+ },
2119
+ {
2120
+ "epoch": 1.6,
2121
+ "learning_rate": 8.92618653335837e-06,
2122
+ "loss": 0.089,
2123
+ "step": 351
2124
+ },
2125
+ {
2126
+ "epoch": 1.6,
2127
+ "learning_rate": 8.87865823476586e-06,
2128
+ "loss": 0.0971,
2129
+ "step": 352
2130
+ },
2131
+ {
2132
+ "epoch": 1.6,
2133
+ "learning_rate": 8.831155575474103e-06,
2134
+ "loss": 0.0826,
2135
+ "step": 353
2136
+ },
2137
+ {
2138
+ "epoch": 1.61,
2139
+ "learning_rate": 8.783679641623845e-06,
2140
+ "loss": 0.093,
2141
+ "step": 354
2142
+ },
2143
+ {
2144
+ "epoch": 1.61,
2145
+ "learning_rate": 8.73623151874474e-06,
2146
+ "loss": 0.1005,
2147
+ "step": 355
2148
+ },
2149
+ {
2150
+ "epoch": 1.62,
2151
+ "learning_rate": 8.688812291730565e-06,
2152
+ "loss": 0.0839,
2153
+ "step": 356
2154
+ },
2155
+ {
2156
+ "epoch": 1.62,
2157
+ "learning_rate": 8.641423044814375e-06,
2158
+ "loss": 0.0867,
2159
+ "step": 357
2160
+ },
2161
+ {
2162
+ "epoch": 1.63,
2163
+ "learning_rate": 8.594064861543761e-06,
2164
+ "loss": 0.1104,
2165
+ "step": 358
2166
+ },
2167
+ {
2168
+ "epoch": 1.63,
2169
+ "learning_rate": 8.54673882475603e-06,
2170
+ "loss": 0.084,
2171
+ "step": 359
2172
+ },
2173
+ {
2174
+ "epoch": 1.64,
2175
+ "learning_rate": 8.499446016553475e-06,
2176
+ "loss": 0.0751,
2177
+ "step": 360
2178
+ },
2179
+ {
2180
+ "epoch": 1.64,
2181
+ "learning_rate": 8.452187518278615e-06,
2182
+ "loss": 0.1002,
2183
+ "step": 361
2184
+ },
2185
+ {
2186
+ "epoch": 1.65,
2187
+ "learning_rate": 8.404964410489485e-06,
2188
+ "loss": 0.0707,
2189
+ "step": 362
2190
+ },
2191
+ {
2192
+ "epoch": 1.65,
2193
+ "learning_rate": 8.357777772934914e-06,
2194
+ "loss": 0.0877,
2195
+ "step": 363
2196
+ },
2197
+ {
2198
+ "epoch": 1.65,
2199
+ "learning_rate": 8.310628684529856e-06,
2200
+ "loss": 0.0888,
2201
+ "step": 364
2202
+ },
2203
+ {
2204
+ "epoch": 1.66,
2205
+ "learning_rate": 8.263518223330698e-06,
2206
+ "loss": 0.0877,
2207
+ "step": 365
2208
+ },
2209
+ {
2210
+ "epoch": 1.66,
2211
+ "learning_rate": 8.216447466510633e-06,
2212
+ "loss": 0.0852,
2213
+ "step": 366
2214
+ },
2215
+ {
2216
+ "epoch": 1.67,
2217
+ "learning_rate": 8.169417490335008e-06,
2218
+ "loss": 0.082,
2219
+ "step": 367
2220
+ },
2221
+ {
2222
+ "epoch": 1.67,
2223
+ "learning_rate": 8.12242937013674e-06,
2224
+ "loss": 0.0881,
2225
+ "step": 368
2226
+ },
2227
+ {
2228
+ "epoch": 1.68,
2229
+ "learning_rate": 8.075484180291702e-06,
2230
+ "loss": 0.0766,
2231
+ "step": 369
2232
+ },
2233
+ {
2234
+ "epoch": 1.68,
2235
+ "learning_rate": 8.028582994194185e-06,
2236
+ "loss": 0.0882,
2237
+ "step": 370
2238
+ },
2239
+ {
2240
+ "epoch": 1.69,
2241
+ "learning_rate": 7.981726884232328e-06,
2242
+ "loss": 0.103,
2243
+ "step": 371
2244
+ },
2245
+ {
2246
+ "epoch": 1.69,
2247
+ "learning_rate": 7.93491692176363e-06,
2248
+ "loss": 0.1006,
2249
+ "step": 372
2250
+ },
2251
+ {
2252
+ "epoch": 1.7,
2253
+ "learning_rate": 7.888154177090406e-06,
2254
+ "loss": 0.0961,
2255
+ "step": 373
2256
+ },
2257
+ {
2258
+ "epoch": 1.7,
2259
+ "learning_rate": 7.841439719435378e-06,
2260
+ "loss": 0.1091,
2261
+ "step": 374
2262
+ },
2263
+ {
2264
+ "epoch": 1.7,
2265
+ "learning_rate": 7.79477461691716e-06,
2266
+ "loss": 0.0939,
2267
+ "step": 375
2268
+ },
2269
+ {
2270
+ "epoch": 1.71,
2271
+ "learning_rate": 7.748159936525896e-06,
2272
+ "loss": 0.092,
2273
+ "step": 376
2274
+ },
2275
+ {
2276
+ "epoch": 1.71,
2277
+ "learning_rate": 7.701596744098818e-06,
2278
+ "loss": 0.1116,
2279
+ "step": 377
2280
+ },
2281
+ {
2282
+ "epoch": 1.72,
2283
+ "learning_rate": 7.655086104295904e-06,
2284
+ "loss": 0.0849,
2285
+ "step": 378
2286
+ },
2287
+ {
2288
+ "epoch": 1.72,
2289
+ "learning_rate": 7.608629080575518e-06,
2290
+ "loss": 0.0696,
2291
+ "step": 379
2292
+ },
2293
+ {
2294
+ "epoch": 1.73,
2295
+ "learning_rate": 7.5622267351701065e-06,
2296
+ "loss": 0.0829,
2297
+ "step": 380
2298
+ },
2299
+ {
2300
+ "epoch": 1.73,
2301
+ "learning_rate": 7.5158801290619e-06,
2302
+ "loss": 0.0976,
2303
+ "step": 381
2304
+ },
2305
+ {
2306
+ "epoch": 1.74,
2307
+ "learning_rate": 7.469590321958663e-06,
2308
+ "loss": 0.0943,
2309
+ "step": 382
2310
+ },
2311
+ {
2312
+ "epoch": 1.74,
2313
+ "learning_rate": 7.423358372269456e-06,
2314
+ "loss": 0.0843,
2315
+ "step": 383
2316
+ },
2317
+ {
2318
+ "epoch": 1.75,
2319
+ "learning_rate": 7.377185337080443e-06,
2320
+ "loss": 0.0894,
2321
+ "step": 384
2322
+ },
2323
+ {
2324
+ "epoch": 1.75,
2325
+ "learning_rate": 7.331072272130713e-06,
2326
+ "loss": 0.089,
2327
+ "step": 385
2328
+ },
2329
+ {
2330
+ "epoch": 1.75,
2331
+ "learning_rate": 7.285020231788149e-06,
2332
+ "loss": 0.0771,
2333
+ "step": 386
2334
+ },
2335
+ {
2336
+ "epoch": 1.76,
2337
+ "learning_rate": 7.239030269025311e-06,
2338
+ "loss": 0.0879,
2339
+ "step": 387
2340
+ },
2341
+ {
2342
+ "epoch": 1.76,
2343
+ "learning_rate": 7.193103435395378e-06,
2344
+ "loss": 0.1241,
2345
+ "step": 388
2346
+ },
2347
+ {
2348
+ "epoch": 1.77,
2349
+ "learning_rate": 7.147240781008068e-06,
2350
+ "loss": 0.0782,
2351
+ "step": 389
2352
+ },
2353
+ {
2354
+ "epoch": 1.77,
2355
+ "learning_rate": 7.1014433545056785e-06,
2356
+ "loss": 0.0958,
2357
+ "step": 390
2358
+ },
2359
+ {
2360
+ "epoch": 1.78,
2361
+ "learning_rate": 7.0557122030390545e-06,
2362
+ "loss": 0.0877,
2363
+ "step": 391
2364
+ },
2365
+ {
2366
+ "epoch": 1.78,
2367
+ "learning_rate": 7.0100483722436985e-06,
2368
+ "loss": 0.0867,
2369
+ "step": 392
2370
+ },
2371
+ {
2372
+ "epoch": 1.79,
2373
+ "learning_rate": 6.964452906215815e-06,
2374
+ "loss": 0.075,
2375
+ "step": 393
2376
+ },
2377
+ {
2378
+ "epoch": 1.79,
2379
+ "learning_rate": 6.918926847488477e-06,
2380
+ "loss": 0.0921,
2381
+ "step": 394
2382
+ },
2383
+ {
2384
+ "epoch": 1.8,
2385
+ "learning_rate": 6.873471237007754e-06,
2386
+ "loss": 0.1001,
2387
+ "step": 395
2388
+ },
2389
+ {
2390
+ "epoch": 1.8,
2391
+ "learning_rate": 6.8280871141089415e-06,
2392
+ "loss": 0.0913,
2393
+ "step": 396
2394
+ },
2395
+ {
2396
+ "epoch": 1.8,
2397
+ "learning_rate": 6.782775516492772e-06,
2398
+ "loss": 0.1061,
2399
+ "step": 397
2400
+ },
2401
+ {
2402
+ "epoch": 1.81,
2403
+ "learning_rate": 6.73753748020171e-06,
2404
+ "loss": 0.0903,
2405
+ "step": 398
2406
+ },
2407
+ {
2408
+ "epoch": 1.81,
2409
+ "learning_rate": 6.692374039596241e-06,
2410
+ "loss": 0.0819,
2411
+ "step": 399
2412
+ },
2413
+ {
2414
+ "epoch": 1.82,
2415
+ "learning_rate": 6.64728622733124e-06,
2416
+ "loss": 0.096,
2417
+ "step": 400
2418
+ },
2419
+ {
2420
+ "epoch": 1.82,
2421
+ "learning_rate": 6.602275074332345e-06,
2422
+ "loss": 0.0983,
2423
+ "step": 401
2424
+ },
2425
+ {
2426
+ "epoch": 1.83,
2427
+ "learning_rate": 6.5573416097724e-06,
2428
+ "loss": 0.0965,
2429
+ "step": 402
2430
+ },
2431
+ {
2432
+ "epoch": 1.83,
2433
+ "learning_rate": 6.512486861047911e-06,
2434
+ "loss": 0.0936,
2435
+ "step": 403
2436
+ },
2437
+ {
2438
+ "epoch": 1.84,
2439
+ "learning_rate": 6.467711853755558e-06,
2440
+ "loss": 0.092,
2441
+ "step": 404
2442
+ },
2443
+ {
2444
+ "epoch": 1.84,
2445
+ "learning_rate": 6.423017611668745e-06,
2446
+ "loss": 0.0929,
2447
+ "step": 405
2448
+ },
2449
+ {
2450
+ "epoch": 1.85,
2451
+ "learning_rate": 6.378405156714202e-06,
2452
+ "loss": 0.0856,
2453
+ "step": 406
2454
+ },
2455
+ {
2456
+ "epoch": 1.85,
2457
+ "learning_rate": 6.333875508948592e-06,
2458
+ "loss": 0.0947,
2459
+ "step": 407
2460
+ },
2461
+ {
2462
+ "epoch": 1.85,
2463
+ "learning_rate": 6.289429686535226e-06,
2464
+ "loss": 0.0819,
2465
+ "step": 408
2466
+ },
2467
+ {
2468
+ "epoch": 1.86,
2469
+ "learning_rate": 6.2450687057207395e-06,
2470
+ "loss": 0.0854,
2471
+ "step": 409
2472
+ },
2473
+ {
2474
+ "epoch": 1.86,
2475
+ "learning_rate": 6.200793580811897e-06,
2476
+ "loss": 0.0853,
2477
+ "step": 410
2478
+ },
2479
+ {
2480
+ "epoch": 1.87,
2481
+ "learning_rate": 6.156605324152369e-06,
2482
+ "loss": 0.0836,
2483
+ "step": 411
2484
+ },
2485
+ {
2486
+ "epoch": 1.87,
2487
+ "learning_rate": 6.112504946099605e-06,
2488
+ "loss": 0.0856,
2489
+ "step": 412
2490
+ },
2491
+ {
2492
+ "epoch": 1.88,
2493
+ "learning_rate": 6.068493455001718e-06,
2494
+ "loss": 0.0998,
2495
+ "step": 413
2496
+ },
2497
+ {
2498
+ "epoch": 1.88,
2499
+ "learning_rate": 6.024571857174443e-06,
2500
+ "loss": 0.0738,
2501
+ "step": 414
2502
+ },
2503
+ {
2504
+ "epoch": 1.89,
2505
+ "learning_rate": 5.98074115687811e-06,
2506
+ "loss": 0.0758,
2507
+ "step": 415
2508
+ },
2509
+ {
2510
+ "epoch": 1.89,
2511
+ "learning_rate": 5.937002356294699e-06,
2512
+ "loss": 0.0953,
2513
+ "step": 416
2514
+ },
2515
+ {
2516
+ "epoch": 1.9,
2517
+ "learning_rate": 5.893356455504911e-06,
2518
+ "loss": 0.0919,
2519
+ "step": 417
2520
+ },
2521
+ {
2522
+ "epoch": 1.9,
2523
+ "learning_rate": 5.8498044524653175e-06,
2524
+ "loss": 0.0927,
2525
+ "step": 418
2526
+ },
2527
+ {
2528
+ "epoch": 1.9,
2529
+ "learning_rate": 5.806347342985521e-06,
2530
+ "loss": 0.085,
2531
+ "step": 419
2532
+ },
2533
+ {
2534
+ "epoch": 1.91,
2535
+ "learning_rate": 5.7629861207054135e-06,
2536
+ "loss": 0.071,
2537
+ "step": 420
2538
+ },
2539
+ {
2540
+ "epoch": 1.91,
2541
+ "learning_rate": 5.719721777072425e-06,
2542
+ "loss": 0.0737,
2543
+ "step": 421
2544
+ },
2545
+ {
2546
+ "epoch": 1.92,
2547
+ "learning_rate": 5.676555301318877e-06,
2548
+ "loss": 0.0854,
2549
+ "step": 422
2550
+ },
2551
+ {
2552
+ "epoch": 1.92,
2553
+ "learning_rate": 5.633487680439362e-06,
2554
+ "loss": 0.0858,
2555
+ "step": 423
2556
+ },
2557
+ {
2558
+ "epoch": 1.93,
2559
+ "learning_rate": 5.5905198991681695e-06,
2560
+ "loss": 0.0788,
2561
+ "step": 424
2562
+ },
2563
+ {
2564
+ "epoch": 1.93,
2565
+ "learning_rate": 5.547652939956764e-06,
2566
+ "loss": 0.1029,
2567
+ "step": 425
2568
+ },
2569
+ {
2570
+ "epoch": 1.94,
2571
+ "learning_rate": 5.504887782951343e-06,
2572
+ "loss": 0.0871,
2573
+ "step": 426
2574
+ },
2575
+ {
2576
+ "epoch": 1.94,
2577
+ "learning_rate": 5.462225405970401e-06,
2578
+ "loss": 0.0727,
2579
+ "step": 427
2580
+ },
2581
+ {
2582
+ "epoch": 1.95,
2583
+ "learning_rate": 5.419666784482398e-06,
2584
+ "loss": 0.0787,
2585
+ "step": 428
2586
+ },
2587
+ {
2588
+ "epoch": 1.95,
2589
+ "learning_rate": 5.377212891583419e-06,
2590
+ "loss": 0.1104,
2591
+ "step": 429
2592
+ },
2593
+ {
2594
+ "epoch": 1.95,
2595
+ "learning_rate": 5.3348646979749685e-06,
2596
+ "loss": 0.1016,
2597
+ "step": 430
2598
+ },
2599
+ {
2600
+ "epoch": 1.96,
2601
+ "learning_rate": 5.29262317194174e-06,
2602
+ "loss": 0.0821,
2603
+ "step": 431
2604
+ },
2605
+ {
2606
+ "epoch": 1.96,
2607
+ "learning_rate": 5.250489279329501e-06,
2608
+ "loss": 0.0838,
2609
+ "step": 432
2610
+ },
2611
+ {
2612
+ "epoch": 1.97,
2613
+ "learning_rate": 5.20846398352298e-06,
2614
+ "loss": 0.0849,
2615
+ "step": 433
2616
+ },
2617
+ {
2618
+ "epoch": 1.97,
2619
+ "learning_rate": 5.1665482454238815e-06,
2620
+ "loss": 0.07,
2621
+ "step": 434
2622
+ },
2623
+ {
2624
+ "epoch": 1.98,
2625
+ "learning_rate": 5.124743023428867e-06,
2626
+ "loss": 0.072,
2627
+ "step": 435
2628
+ },
2629
+ {
2630
+ "epoch": 1.98,
2631
+ "learning_rate": 5.083049273407681e-06,
2632
+ "loss": 0.077,
2633
+ "step": 436
2634
+ },
2635
+ {
2636
+ "epoch": 1.99,
2637
+ "learning_rate": 5.041467948681269e-06,
2638
+ "loss": 0.0832,
2639
+ "step": 437
2640
+ },
2641
+ {
2642
+ "epoch": 1.99,
2643
+ "learning_rate": 5.000000000000003e-06,
2644
+ "loss": 0.0964,
2645
+ "step": 438
2646
+ },
2647
+ {
2648
+ "epoch": 2.0,
2649
+ "learning_rate": 4.958646375521909e-06,
2650
+ "loss": 0.0969,
2651
+ "step": 439
2652
+ },
2653
+ {
2654
+ "epoch": 2.0,
2655
+ "eval_loss": 0.0929914191365242,
2656
+ "eval_runtime": 1063.5196,
2657
+ "eval_samples_per_second": 4.413,
2658
+ "eval_steps_per_second": 2.207,
2659
+ "step": 439
2660
+ },
2661
+ {
2662
+ "epoch": 2.0,
2663
+ "learning_rate": 4.917408020791027e-06,
2664
+ "loss": 0.1064,
2665
+ "step": 440
2666
+ },
2667
+ {
2668
+ "epoch": 2.0,
2669
+ "learning_rate": 4.876285878715764e-06,
2670
+ "loss": 0.0924,
2671
+ "step": 441
2672
+ },
2673
+ {
2674
+ "epoch": 2.01,
2675
+ "learning_rate": 4.8352808895473516e-06,
2676
+ "loss": 0.086,
2677
+ "step": 442
2678
+ },
2679
+ {
2680
+ "epoch": 2.01,
2681
+ "learning_rate": 4.794393990858321e-06,
2682
+ "loss": 0.0829,
2683
+ "step": 443
2684
+ },
2685
+ {
2686
+ "epoch": 2.02,
2687
+ "learning_rate": 4.753626117521103e-06,
2688
+ "loss": 0.088,
2689
+ "step": 444
2690
+ },
2691
+ {
2692
+ "epoch": 2.02,
2693
+ "learning_rate": 4.712978201686621e-06,
2694
+ "loss": 0.0954,
2695
+ "step": 445
2696
+ },
2697
+ {
2698
+ "epoch": 2.03,
2699
+ "learning_rate": 4.672451172762998e-06,
2700
+ "loss": 0.0955,
2701
+ "step": 446
2702
+ },
2703
+ {
2704
+ "epoch": 2.03,
2705
+ "learning_rate": 4.632045957394286e-06,
2706
+ "loss": 0.1074,
2707
+ "step": 447
2708
+ },
2709
+ {
2710
+ "epoch": 2.04,
2711
+ "learning_rate": 4.591763479439295e-06,
2712
+ "loss": 0.0955,
2713
+ "step": 448
2714
+ },
2715
+ {
2716
+ "epoch": 2.04,
2717
+ "learning_rate": 4.551604659950466e-06,
2718
+ "loss": 0.0795,
2719
+ "step": 449
2720
+ },
2721
+ {
2722
+ "epoch": 2.05,
2723
+ "learning_rate": 4.5115704171528105e-06,
2724
+ "loss": 0.0857,
2725
+ "step": 450
2726
+ },
2727
+ {
2728
+ "epoch": 2.05,
2729
+ "learning_rate": 4.471661666422899e-06,
2730
+ "loss": 0.0914,
2731
+ "step": 451
2732
+ },
2733
+ {
2734
+ "epoch": 2.05,
2735
+ "learning_rate": 4.431879320267972e-06,
2736
+ "loss": 0.0808,
2737
+ "step": 452
2738
+ },
2739
+ {
2740
+ "epoch": 2.06,
2741
+ "learning_rate": 4.3922242883050226e-06,
2742
+ "loss": 0.0854,
2743
+ "step": 453
2744
+ },
2745
+ {
2746
+ "epoch": 2.06,
2747
+ "learning_rate": 4.3526974772400406e-06,
2748
+ "loss": 0.0941,
2749
+ "step": 454
2750
+ },
2751
+ {
2752
+ "epoch": 2.07,
2753
+ "learning_rate": 4.313299790847263e-06,
2754
+ "loss": 0.09,
2755
+ "step": 455
2756
+ },
2757
+ {
2758
+ "epoch": 2.07,
2759
+ "learning_rate": 4.274032129948512e-06,
2760
+ "loss": 0.0686,
2761
+ "step": 456
2762
+ },
2763
+ {
2764
+ "epoch": 2.08,
2765
+ "learning_rate": 4.234895392392591e-06,
2766
+ "loss": 0.0753,
2767
+ "step": 457
2768
+ },
2769
+ {
2770
+ "epoch": 2.08,
2771
+ "learning_rate": 4.19589047303477e-06,
2772
+ "loss": 0.0982,
2773
+ "step": 458
2774
+ },
2775
+ {
2776
+ "epoch": 2.09,
2777
+ "learning_rate": 4.1570182637163155e-06,
2778
+ "loss": 0.0897,
2779
+ "step": 459
2780
+ },
2781
+ {
2782
+ "epoch": 2.09,
2783
+ "learning_rate": 4.1182796532441e-06,
2784
+ "loss": 0.0719,
2785
+ "step": 460
2786
+ },
2787
+ {
2788
+ "epoch": 2.1,
2789
+ "learning_rate": 4.079675527370273e-06,
2790
+ "loss": 0.0744,
2791
+ "step": 461
2792
+ },
2793
+ {
2794
+ "epoch": 2.1,
2795
+ "learning_rate": 4.041206768772023e-06,
2796
+ "loss": 0.0858,
2797
+ "step": 462
2798
+ },
2799
+ {
2800
+ "epoch": 2.1,
2801
+ "learning_rate": 4.002874257031384e-06,
2802
+ "loss": 0.0785,
2803
+ "step": 463
2804
+ },
2805
+ {
2806
+ "epoch": 2.11,
2807
+ "learning_rate": 3.9646788686151335e-06,
2808
+ "loss": 0.0837,
2809
+ "step": 464
2810
+ },
2811
+ {
2812
+ "epoch": 2.11,
2813
+ "learning_rate": 3.9266214768547335e-06,
2814
+ "loss": 0.0791,
2815
+ "step": 465
2816
+ },
2817
+ {
2818
+ "epoch": 2.12,
2819
+ "learning_rate": 3.888702951926384e-06,
2820
+ "loss": 0.0691,
2821
+ "step": 466
2822
+ },
2823
+ {
2824
+ "epoch": 2.12,
2825
+ "learning_rate": 3.850924160831116e-06,
2826
+ "loss": 0.0881,
2827
+ "step": 467
2828
+ },
2829
+ {
2830
+ "epoch": 2.13,
2831
+ "learning_rate": 3.8132859673749688e-06,
2832
+ "loss": 0.0748,
2833
+ "step": 468
2834
+ },
2835
+ {
2836
+ "epoch": 2.13,
2837
+ "learning_rate": 3.7757892321492297e-06,
2838
+ "loss": 0.0796,
2839
+ "step": 469
2840
+ },
2841
+ {
2842
+ "epoch": 2.14,
2843
+ "learning_rate": 3.738434812510785e-06,
2844
+ "loss": 0.0789,
2845
+ "step": 470
2846
+ },
2847
+ {
2848
+ "epoch": 2.14,
2849
+ "learning_rate": 3.701223562562478e-06,
2850
+ "loss": 0.0819,
2851
+ "step": 471
2852
+ },
2853
+ {
2854
+ "epoch": 2.15,
2855
+ "learning_rate": 3.6641563331336126e-06,
2856
+ "loss": 0.0814,
2857
+ "step": 472
2858
+ },
2859
+ {
2860
+ "epoch": 2.15,
2861
+ "learning_rate": 3.627233971760481e-06,
2862
+ "loss": 0.0704,
2863
+ "step": 473
2864
+ },
2865
+ {
2866
+ "epoch": 2.16,
2867
+ "learning_rate": 3.590457322666997e-06,
2868
+ "loss": 0.0734,
2869
+ "step": 474
2870
+ },
2871
+ {
2872
+ "epoch": 2.16,
2873
+ "learning_rate": 3.5538272267453734e-06,
2874
+ "loss": 0.0944,
2875
+ "step": 475
2876
+ },
2877
+ {
2878
+ "epoch": 2.16,
2879
+ "learning_rate": 3.5173445215369183e-06,
2880
+ "loss": 0.0931,
2881
+ "step": 476
2882
+ },
2883
+ {
2884
+ "epoch": 2.17,
2885
+ "learning_rate": 3.4810100412128743e-06,
2886
+ "loss": 0.0793,
2887
+ "step": 477
2888
+ },
2889
+ {
2890
+ "epoch": 2.17,
2891
+ "learning_rate": 3.4448246165553465e-06,
2892
+ "loss": 0.0686,
2893
+ "step": 478
2894
+ },
2895
+ {
2896
+ "epoch": 2.18,
2897
+ "learning_rate": 3.4087890749382947e-06,
2898
+ "loss": 0.087,
2899
+ "step": 479
2900
+ },
2901
+ {
2902
+ "epoch": 2.18,
2903
+ "learning_rate": 3.37290424030864e-06,
2904
+ "loss": 0.0983,
2905
+ "step": 480
2906
+ },
2907
+ {
2908
+ "epoch": 2.19,
2909
+ "learning_rate": 3.3371709331674075e-06,
2910
+ "loss": 0.079,
2911
+ "step": 481
2912
+ },
2913
+ {
2914
+ "epoch": 2.19,
2915
+ "learning_rate": 3.3015899705509734e-06,
2916
+ "loss": 0.0807,
2917
+ "step": 482
2918
+ },
2919
+ {
2920
+ "epoch": 2.2,
2921
+ "learning_rate": 3.2661621660123666e-06,
2922
+ "loss": 0.1039,
2923
+ "step": 483
2924
+ },
2925
+ {
2926
+ "epoch": 2.2,
2927
+ "learning_rate": 3.2308883296027073e-06,
2928
+ "loss": 0.074,
2929
+ "step": 484
2930
+ },
2931
+ {
2932
+ "epoch": 2.21,
2933
+ "learning_rate": 3.195769267852632e-06,
2934
+ "loss": 0.0684,
2935
+ "step": 485
2936
+ },
2937
+ {
2938
+ "epoch": 2.21,
2939
+ "learning_rate": 3.1608057837538976e-06,
2940
+ "loss": 0.0934,
2941
+ "step": 486
2942
+ },
2943
+ {
2944
+ "epoch": 2.21,
2945
+ "learning_rate": 3.1259986767409866e-06,
2946
+ "loss": 0.0656,
2947
+ "step": 487
2948
+ },
2949
+ {
2950
+ "epoch": 2.22,
2951
+ "learning_rate": 3.0913487426728672e-06,
2952
+ "loss": 0.0694,
2953
+ "step": 488
2954
+ },
2955
+ {
2956
+ "epoch": 2.22,
2957
+ "learning_rate": 3.0568567738147505e-06,
2958
+ "loss": 0.0806,
2959
+ "step": 489
2960
+ },
2961
+ {
2962
+ "epoch": 2.23,
2963
+ "learning_rate": 3.0225235588200096e-06,
2964
+ "loss": 0.0727,
2965
+ "step": 490
2966
+ },
2967
+ {
2968
+ "epoch": 2.23,
2969
+ "learning_rate": 2.988349882712135e-06,
2970
+ "loss": 0.0808,
2971
+ "step": 491
2972
+ },
2973
+ {
2974
+ "epoch": 2.24,
2975
+ "learning_rate": 2.9543365268667866e-06,
2976
+ "loss": 0.0867,
2977
+ "step": 492
2978
+ },
2979
+ {
2980
+ "epoch": 2.24,
2981
+ "learning_rate": 2.9204842689939207e-06,
2982
+ "loss": 0.0908,
2983
+ "step": 493
2984
+ },
2985
+ {
2986
+ "epoch": 2.25,
2987
+ "learning_rate": 2.8867938831200203e-06,
2988
+ "loss": 0.0773,
2989
+ "step": 494
2990
+ },
2991
+ {
2992
+ "epoch": 2.25,
2993
+ "learning_rate": 2.853266139570391e-06,
2994
+ "loss": 0.083,
2995
+ "step": 495
2996
+ },
2997
+ {
2998
+ "epoch": 2.26,
2999
+ "learning_rate": 2.819901804951547e-06,
3000
+ "loss": 0.0818,
3001
+ "step": 496
3002
+ },
3003
+ {
3004
+ "epoch": 2.26,
3005
+ "learning_rate": 2.786701642133678e-06,
3006
+ "loss": 0.0895,
3007
+ "step": 497
3008
+ },
3009
+ {
3010
+ "epoch": 2.26,
3011
+ "learning_rate": 2.7536664102332177e-06,
3012
+ "loss": 0.0895,
3013
+ "step": 498
3014
+ },
3015
+ {
3016
+ "epoch": 2.27,
3017
+ "learning_rate": 2.7207968645954806e-06,
3018
+ "loss": 0.0727,
3019
+ "step": 499
3020
+ },
3021
+ {
3022
+ "epoch": 2.27,
3023
+ "learning_rate": 2.6880937567773903e-06,
3024
+ "loss": 0.1001,
3025
+ "step": 500
3026
+ },
3027
+ {
3028
+ "epoch": 2.28,
3029
+ "learning_rate": 2.655557834530288e-06,
3030
+ "loss": 0.0789,
3031
+ "step": 501
3032
+ },
3033
+ {
3034
+ "epoch": 2.28,
3035
+ "learning_rate": 2.6231898417828605e-06,
3036
+ "loss": 0.0821,
3037
+ "step": 502
3038
+ },
3039
+ {
3040
+ "epoch": 2.29,
3041
+ "learning_rate": 2.590990518624098e-06,
3042
+ "loss": 0.0942,
3043
+ "step": 503
3044
+ },
3045
+ {
3046
+ "epoch": 2.29,
3047
+ "learning_rate": 2.5589606012863968e-06,
3048
+ "loss": 0.0825,
3049
+ "step": 504
3050
+ },
3051
+ {
3052
+ "epoch": 2.3,
3053
+ "learning_rate": 2.5271008221287043e-06,
3054
+ "loss": 0.1024,
3055
+ "step": 505
3056
+ },
3057
+ {
3058
+ "epoch": 2.3,
3059
+ "learning_rate": 2.495411909619804e-06,
3060
+ "loss": 0.083,
3061
+ "step": 506
3062
+ },
3063
+ {
3064
+ "epoch": 2.31,
3065
+ "learning_rate": 2.4638945883216236e-06,
3066
+ "loss": 0.0887,
3067
+ "step": 507
3068
+ },
3069
+ {
3070
+ "epoch": 2.31,
3071
+ "learning_rate": 2.432549578872694e-06,
3072
+ "loss": 0.0871,
3073
+ "step": 508
3074
+ },
3075
+ {
3076
+ "epoch": 2.31,
3077
+ "learning_rate": 2.4013775979716602e-06,
3078
+ "loss": 0.0817,
3079
+ "step": 509
3080
+ },
3081
+ {
3082
+ "epoch": 2.32,
3083
+ "learning_rate": 2.3703793583609013e-06,
3084
+ "loss": 0.0728,
3085
+ "step": 510
3086
+ },
3087
+ {
3088
+ "epoch": 2.32,
3089
+ "learning_rate": 2.339555568810221e-06,
3090
+ "loss": 0.0996,
3091
+ "step": 511
3092
+ },
3093
+ {
3094
+ "epoch": 2.33,
3095
+ "learning_rate": 2.3089069341006563e-06,
3096
+ "loss": 0.0884,
3097
+ "step": 512
3098
+ },
3099
+ {
3100
+ "epoch": 2.33,
3101
+ "learning_rate": 2.2784341550083577e-06,
3102
+ "loss": 0.0915,
3103
+ "step": 513
3104
+ },
3105
+ {
3106
+ "epoch": 2.34,
3107
+ "learning_rate": 2.248137928288564e-06,
3108
+ "loss": 0.0912,
3109
+ "step": 514
3110
+ },
3111
+ {
3112
+ "epoch": 2.34,
3113
+ "learning_rate": 2.218018946659666e-06,
3114
+ "loss": 0.0992,
3115
+ "step": 515
3116
+ },
3117
+ {
3118
+ "epoch": 2.35,
3119
+ "learning_rate": 2.1880778987873806e-06,
3120
+ "loss": 0.0741,
3121
+ "step": 516
3122
+ },
3123
+ {
3124
+ "epoch": 2.35,
3125
+ "learning_rate": 2.158315469268998e-06,
3126
+ "loss": 0.0845,
3127
+ "step": 517
3128
+ },
3129
+ {
3130
+ "epoch": 2.36,
3131
+ "learning_rate": 2.128732338617726e-06,
3132
+ "loss": 0.0811,
3133
+ "step": 518
3134
+ },
3135
+ {
3136
+ "epoch": 2.36,
3137
+ "learning_rate": 2.099329183247126e-06,
3138
+ "loss": 0.0805,
3139
+ "step": 519
3140
+ },
3141
+ {
3142
+ "epoch": 2.36,
3143
+ "learning_rate": 2.0701066754556708e-06,
3144
+ "loss": 0.0911,
3145
+ "step": 520
3146
+ },
3147
+ {
3148
+ "epoch": 2.37,
3149
+ "learning_rate": 2.0410654834113362e-06,
3150
+ "loss": 0.0873,
3151
+ "step": 521
3152
+ },
3153
+ {
3154
+ "epoch": 2.37,
3155
+ "learning_rate": 2.012206271136353e-06,
3156
+ "loss": 0.0807,
3157
+ "step": 522
3158
+ },
3159
+ {
3160
+ "epoch": 2.38,
3161
+ "learning_rate": 1.983529698492006e-06,
3162
+ "loss": 0.072,
3163
+ "step": 523
3164
+ },
3165
+ {
3166
+ "epoch": 2.38,
3167
+ "learning_rate": 1.9550364211635674e-06,
3168
+ "loss": 0.0962,
3169
+ "step": 524
3170
+ },
3171
+ {
3172
+ "epoch": 2.39,
3173
+ "learning_rate": 1.926727090645275e-06,
3174
+ "loss": 0.0838,
3175
+ "step": 525
3176
+ },
3177
+ {
3178
+ "epoch": 2.39,
3179
+ "learning_rate": 1.8986023542254617e-06,
3180
+ "loss": 0.0791,
3181
+ "step": 526
3182
+ },
3183
+ {
3184
+ "epoch": 2.4,
3185
+ "learning_rate": 1.8706628549717453e-06,
3186
+ "loss": 0.0748,
3187
+ "step": 527
3188
+ },
3189
+ {
3190
+ "epoch": 2.4,
3191
+ "learning_rate": 1.8429092317163244e-06,
3192
+ "loss": 0.1024,
3193
+ "step": 528
3194
+ },
3195
+ {
3196
+ "epoch": 2.41,
3197
+ "learning_rate": 1.8153421190413668e-06,
3198
+ "loss": 0.0797,
3199
+ "step": 529
3200
+ },
3201
+ {
3202
+ "epoch": 2.41,
3203
+ "learning_rate": 1.7879621472645147e-06,
3204
+ "loss": 0.0757,
3205
+ "step": 530
3206
+ },
3207
+ {
3208
+ "epoch": 2.41,
3209
+ "learning_rate": 1.7607699424244583e-06,
3210
+ "loss": 0.0907,
3211
+ "step": 531
3212
+ },
3213
+ {
3214
+ "epoch": 2.42,
3215
+ "learning_rate": 1.7337661262666294e-06,
3216
+ "loss": 0.0787,
3217
+ "step": 532
3218
+ },
3219
+ {
3220
+ "epoch": 2.42,
3221
+ "learning_rate": 1.7069513162289786e-06,
3222
+ "loss": 0.0882,
3223
+ "step": 533
3224
+ },
3225
+ {
3226
+ "epoch": 2.43,
3227
+ "learning_rate": 1.6803261254278635e-06,
3228
+ "loss": 0.0727,
3229
+ "step": 534
3230
+ },
3231
+ {
3232
+ "epoch": 2.43,
3233
+ "learning_rate": 1.6538911626440312e-06,
3234
+ "loss": 0.0866,
3235
+ "step": 535
3236
+ },
3237
+ {
3238
+ "epoch": 2.44,
3239
+ "learning_rate": 1.6276470323086936e-06,
3240
+ "loss": 0.0836,
3241
+ "step": 536
3242
+ },
3243
+ {
3244
+ "epoch": 2.44,
3245
+ "learning_rate": 1.6015943344897022e-06,
3246
+ "loss": 0.0765,
3247
+ "step": 537
3248
+ },
3249
+ {
3250
+ "epoch": 2.45,
3251
+ "learning_rate": 1.575733664877851e-06,
3252
+ "loss": 0.0732,
3253
+ "step": 538
3254
+ },
3255
+ {
3256
+ "epoch": 2.45,
3257
+ "learning_rate": 1.5500656147732208e-06,
3258
+ "loss": 0.0869,
3259
+ "step": 539
3260
+ },
3261
+ {
3262
+ "epoch": 2.46,
3263
+ "learning_rate": 1.5245907710716912e-06,
3264
+ "loss": 0.0878,
3265
+ "step": 540
3266
+ },
3267
+ {
3268
+ "epoch": 2.46,
3269
+ "learning_rate": 1.499309716251498e-06,
3270
+ "loss": 0.0655,
3271
+ "step": 541
3272
+ },
3273
+ {
3274
+ "epoch": 2.46,
3275
+ "learning_rate": 1.474223028359939e-06,
3276
+ "loss": 0.0876,
3277
+ "step": 542
3278
+ },
3279
+ {
3280
+ "epoch": 2.47,
3281
+ "learning_rate": 1.4493312810001293e-06,
3282
+ "loss": 0.0951,
3283
+ "step": 543
3284
+ },
3285
+ {
3286
+ "epoch": 2.47,
3287
+ "learning_rate": 1.4246350433179057e-06,
3288
+ "loss": 0.0748,
3289
+ "step": 544
3290
+ },
3291
+ {
3292
+ "epoch": 2.48,
3293
+ "learning_rate": 1.4001348799888093e-06,
3294
+ "loss": 0.0889,
3295
+ "step": 545
3296
+ },
3297
+ {
3298
+ "epoch": 2.48,
3299
+ "learning_rate": 1.3758313512051702e-06,
3300
+ "loss": 0.0862,
3301
+ "step": 546
3302
+ },
3303
+ {
3304
+ "epoch": 2.49,
3305
+ "learning_rate": 1.3517250126632986e-06,
3306
+ "loss": 0.0766,
3307
+ "step": 547
3308
+ },
3309
+ {
3310
+ "epoch": 2.49,
3311
+ "learning_rate": 1.3278164155507844e-06,
3312
+ "loss": 0.0801,
3313
+ "step": 548
3314
+ },
3315
+ {
3316
+ "epoch": 2.5,
3317
+ "learning_rate": 1.30410610653389e-06,
3318
+ "loss": 0.0878,
3319
+ "step": 549
3320
+ },
3321
+ {
3322
+ "epoch": 2.5,
3323
+ "learning_rate": 1.2805946277450565e-06,
3324
+ "loss": 0.0827,
3325
+ "step": 550
3326
+ },
3327
+ {
3328
+ "epoch": 2.51,
3329
+ "learning_rate": 1.257282516770494e-06,
3330
+ "loss": 0.0775,
3331
+ "step": 551
3332
+ },
3333
+ {
3334
+ "epoch": 2.51,
3335
+ "learning_rate": 1.2341703066379073e-06,
3336
+ "loss": 0.084,
3337
+ "step": 552
3338
+ },
3339
+ {
3340
+ "epoch": 2.51,
3341
+ "learning_rate": 1.2112585258042963e-06,
3342
+ "loss": 0.0907,
3343
+ "step": 553
3344
+ },
3345
+ {
3346
+ "epoch": 2.52,
3347
+ "learning_rate": 1.1885476981438837e-06,
3348
+ "loss": 0.0802,
3349
+ "step": 554
3350
+ },
3351
+ {
3352
+ "epoch": 2.52,
3353
+ "learning_rate": 1.1660383429361155e-06,
3354
+ "loss": 0.0793,
3355
+ "step": 555
3356
+ },
3357
+ {
3358
+ "epoch": 2.53,
3359
+ "learning_rate": 1.1437309748538205e-06,
3360
+ "loss": 0.0753,
3361
+ "step": 556
3362
+ },
3363
+ {
3364
+ "epoch": 2.53,
3365
+ "learning_rate": 1.1216261039514087e-06,
3366
+ "loss": 0.0823,
3367
+ "step": 557
3368
+ },
3369
+ {
3370
+ "epoch": 2.54,
3371
+ "learning_rate": 1.0997242356532335e-06,
3372
+ "loss": 0.0719,
3373
+ "step": 558
3374
+ },
3375
+ {
3376
+ "epoch": 2.54,
3377
+ "learning_rate": 1.0780258707420222e-06,
3378
+ "loss": 0.069,
3379
+ "step": 559
3380
+ },
3381
+ {
3382
+ "epoch": 2.55,
3383
+ "learning_rate": 1.0565315053474324e-06,
3384
+ "loss": 0.0797,
3385
+ "step": 560
3386
+ },
3387
+ {
3388
+ "epoch": 2.55,
3389
+ "learning_rate": 1.0352416309347003e-06,
3390
+ "loss": 0.0728,
3391
+ "step": 561
3392
+ },
3393
+ {
3394
+ "epoch": 2.56,
3395
+ "learning_rate": 1.0141567342934134e-06,
3396
+ "loss": 0.0973,
3397
+ "step": 562
3398
+ },
3399
+ {
3400
+ "epoch": 2.56,
3401
+ "learning_rate": 9.932772975263727e-07,
3402
+ "loss": 0.0648,
3403
+ "step": 563
3404
+ },
3405
+ {
3406
+ "epoch": 2.56,
3407
+ "learning_rate": 9.72603798038574e-07,
3408
+ "loss": 0.0849,
3409
+ "step": 564
3410
+ },
3411
+ {
3412
+ "epoch": 2.57,
3413
+ "learning_rate": 9.521367085262845e-07,
3414
+ "loss": 0.0751,
3415
+ "step": 565
3416
+ },
3417
+ {
3418
+ "epoch": 2.57,
3419
+ "learning_rate": 9.318764969662475e-07,
3420
+ "loss": 0.0951,
3421
+ "step": 566
3422
+ },
3423
+ {
3424
+ "epoch": 2.58,
3425
+ "learning_rate": 9.118236266049707e-07,
3426
+ "loss": 0.0763,
3427
+ "step": 567
3428
+ },
3429
+ {
3430
+ "epoch": 2.58,
3431
+ "learning_rate": 8.919785559481409e-07,
3432
+ "loss": 0.077,
3433
+ "step": 568
3434
+ },
3435
+ {
3436
+ "epoch": 2.59,
3437
+ "learning_rate": 8.723417387501332e-07,
3438
+ "loss": 0.0663,
3439
+ "step": 569
3440
+ },
3441
+ {
3442
+ "epoch": 2.59,
3443
+ "learning_rate": 8.529136240036439e-07,
3444
+ "loss": 0.0694,
3445
+ "step": 570
3446
+ },
3447
+ {
3448
+ "epoch": 2.6,
3449
+ "learning_rate": 8.336946559294223e-07,
3450
+ "loss": 0.0931,
3451
+ "step": 571
3452
+ },
3453
+ {
3454
+ "epoch": 2.6,
3455
+ "learning_rate": 8.146852739661104e-07,
3456
+ "loss": 0.081,
3457
+ "step": 572
3458
+ },
3459
+ {
3460
+ "epoch": 2.61,
3461
+ "learning_rate": 7.958859127601937e-07,
3462
+ "loss": 0.0875,
3463
+ "step": 573
3464
+ },
3465
+ {
3466
+ "epoch": 2.61,
3467
+ "learning_rate": 7.772970021560755e-07,
3468
+ "loss": 0.087,
3469
+ "step": 574
3470
+ },
3471
+ {
3472
+ "epoch": 2.61,
3473
+ "learning_rate": 7.589189671862307e-07,
3474
+ "loss": 0.0823,
3475
+ "step": 575
3476
+ },
3477
+ {
3478
+ "epoch": 2.62,
3479
+ "learning_rate": 7.40752228061502e-07,
3480
+ "loss": 0.0971,
3481
+ "step": 576
3482
+ },
3483
+ {
3484
+ "epoch": 2.62,
3485
+ "learning_rate": 7.227972001614825e-07,
3486
+ "loss": 0.085,
3487
+ "step": 577
3488
+ },
3489
+ {
3490
+ "epoch": 2.63,
3491
+ "learning_rate": 7.050542940250271e-07,
3492
+ "loss": 0.0876,
3493
+ "step": 578
3494
+ },
3495
+ {
3496
+ "epoch": 2.63,
3497
+ "learning_rate": 6.875239153408541e-07,
3498
+ "loss": 0.0934,
3499
+ "step": 579
3500
+ },
3501
+ {
3502
+ "epoch": 2.64,
3503
+ "learning_rate": 6.702064649382778e-07,
3504
+ "loss": 0.0782,
3505
+ "step": 580
3506
+ },
3507
+ {
3508
+ "epoch": 2.64,
3509
+ "learning_rate": 6.531023387780433e-07,
3510
+ "loss": 0.0758,
3511
+ "step": 581
3512
+ },
3513
+ {
3514
+ "epoch": 2.65,
3515
+ "learning_rate": 6.36211927943271e-07,
3516
+ "loss": 0.1023,
3517
+ "step": 582
3518
+ },
3519
+ {
3520
+ "epoch": 2.65,
3521
+ "learning_rate": 6.195356186305101e-07,
3522
+ "loss": 0.1103,
3523
+ "step": 583
3524
+ },
3525
+ {
3526
+ "epoch": 2.66,
3527
+ "learning_rate": 6.030737921409169e-07,
3528
+ "loss": 0.0871,
3529
+ "step": 584
3530
+ },
3531
+ {
3532
+ "epoch": 2.66,
3533
+ "learning_rate": 5.868268248715292e-07,
3534
+ "loss": 0.0756,
3535
+ "step": 585
3536
+ },
3537
+ {
3538
+ "epoch": 2.66,
3539
+ "learning_rate": 5.707950883066681e-07,
3540
+ "loss": 0.0849,
3541
+ "step": 586
3542
+ },
3543
+ {
3544
+ "epoch": 2.67,
3545
+ "learning_rate": 5.549789490094304e-07,
3546
+ "loss": 0.0877,
3547
+ "step": 587
3548
+ },
3549
+ {
3550
+ "epoch": 2.67,
3551
+ "learning_rate": 5.393787686133234e-07,
3552
+ "loss": 0.0862,
3553
+ "step": 588
3554
+ },
3555
+ {
3556
+ "epoch": 2.68,
3557
+ "learning_rate": 5.239949038139858e-07,
3558
+ "loss": 0.1067,
3559
+ "step": 589
3560
+ },
3561
+ {
3562
+ "epoch": 2.68,
3563
+ "learning_rate": 5.088277063610347e-07,
3564
+ "loss": 0.0804,
3565
+ "step": 590
3566
+ },
3567
+ {
3568
+ "epoch": 2.69,
3569
+ "learning_rate": 4.938775230500192e-07,
3570
+ "loss": 0.0877,
3571
+ "step": 591
3572
+ },
3573
+ {
3574
+ "epoch": 2.69,
3575
+ "learning_rate": 4.791446957145041e-07,
3576
+ "loss": 0.0774,
3577
+ "step": 592
3578
+ },
3579
+ {
3580
+ "epoch": 2.7,
3581
+ "learning_rate": 4.646295612182339e-07,
3582
+ "loss": 0.0769,
3583
+ "step": 593
3584
+ },
3585
+ {
3586
+ "epoch": 2.7,
3587
+ "learning_rate": 4.503324514474483e-07,
3588
+ "loss": 0.0745,
3589
+ "step": 594
3590
+ },
3591
+ {
3592
+ "epoch": 2.71,
3593
+ "learning_rate": 4.3625369330328127e-07,
3594
+ "loss": 0.094,
3595
+ "step": 595
3596
+ },
3597
+ {
3598
+ "epoch": 2.71,
3599
+ "learning_rate": 4.223936086942981e-07,
3600
+ "loss": 0.0775,
3601
+ "step": 596
3602
+ },
3603
+ {
3604
+ "epoch": 2.71,
3605
+ "learning_rate": 4.087525145291205e-07,
3606
+ "loss": 0.1056,
3607
+ "step": 597
3608
+ },
3609
+ {
3610
+ "epoch": 2.72,
3611
+ "learning_rate": 3.95330722709194e-07,
3612
+ "loss": 0.0898,
3613
+ "step": 598
3614
+ },
3615
+ {
3616
+ "epoch": 2.72,
3617
+ "learning_rate": 3.821285401216501e-07,
3618
+ "loss": 0.0896,
3619
+ "step": 599
3620
+ },
3621
+ {
3622
+ "epoch": 2.73,
3623
+ "learning_rate": 3.6914626863229e-07,
3624
+ "loss": 0.0738,
3625
+ "step": 600
3626
+ },
3627
+ {
3628
+ "epoch": 2.73,
3629
+ "learning_rate": 3.5638420507868145e-07,
3630
+ "loss": 0.0854,
3631
+ "step": 601
3632
+ },
3633
+ {
3634
+ "epoch": 2.74,
3635
+ "learning_rate": 3.438426412633733e-07,
3636
+ "loss": 0.0809,
3637
+ "step": 602
3638
+ },
3639
+ {
3640
+ "epoch": 2.74,
3641
+ "learning_rate": 3.3152186394722506e-07,
3642
+ "loss": 0.0891,
3643
+ "step": 603
3644
+ },
3645
+ {
3646
+ "epoch": 2.75,
3647
+ "learning_rate": 3.1942215484284666e-07,
3648
+ "loss": 0.0825,
3649
+ "step": 604
3650
+ },
3651
+ {
3652
+ "epoch": 2.75,
3653
+ "learning_rate": 3.075437906081558e-07,
3654
+ "loss": 0.0721,
3655
+ "step": 605
3656
+ },
3657
+ {
3658
+ "epoch": 2.76,
3659
+ "learning_rate": 2.9588704284006176e-07,
3660
+ "loss": 0.0731,
3661
+ "step": 606
3662
+ },
3663
+ {
3664
+ "epoch": 2.76,
3665
+ "learning_rate": 2.844521780682408e-07,
3666
+ "loss": 0.0932,
3667
+ "step": 607
3668
+ },
3669
+ {
3670
+ "epoch": 2.76,
3671
+ "learning_rate": 2.7323945774905714e-07,
3672
+ "loss": 0.0827,
3673
+ "step": 608
3674
+ },
3675
+ {
3676
+ "epoch": 2.77,
3677
+ "learning_rate": 2.6224913825956933e-07,
3678
+ "loss": 0.0824,
3679
+ "step": 609
3680
+ },
3681
+ {
3682
+ "epoch": 2.77,
3683
+ "learning_rate": 2.5148147089168573e-07,
3684
+ "loss": 0.0726,
3685
+ "step": 610
3686
+ },
3687
+ {
3688
+ "epoch": 2.78,
3689
+ "learning_rate": 2.4093670184640263e-07,
3690
+ "loss": 0.0922,
3691
+ "step": 611
3692
+ },
3693
+ {
3694
+ "epoch": 2.78,
3695
+ "learning_rate": 2.3061507222818303e-07,
3696
+ "loss": 0.0792,
3697
+ "step": 612
3698
+ },
3699
+ {
3700
+ "epoch": 2.79,
3701
+ "learning_rate": 2.2051681803944457e-07,
3702
+ "loss": 0.0916,
3703
+ "step": 613
3704
+ },
3705
+ {
3706
+ "epoch": 2.79,
3707
+ "learning_rate": 2.1064217017516154e-07,
3708
+ "loss": 0.1045,
3709
+ "step": 614
3710
+ },
3711
+ {
3712
+ "epoch": 2.8,
3713
+ "learning_rate": 2.00991354417579e-07,
3714
+ "loss": 0.0695,
3715
+ "step": 615
3716
+ },
3717
+ {
3718
+ "epoch": 2.8,
3719
+ "learning_rate": 1.9156459143106598e-07,
3720
+ "loss": 0.0729,
3721
+ "step": 616
3722
+ },
3723
+ {
3724
+ "epoch": 2.81,
3725
+ "learning_rate": 1.8236209675705275e-07,
3726
+ "loss": 0.0842,
3727
+ "step": 617
3728
+ },
3729
+ {
3730
+ "epoch": 2.81,
3731
+ "learning_rate": 1.7338408080911473e-07,
3732
+ "loss": 0.0746,
3733
+ "step": 618
3734
+ },
3735
+ {
3736
+ "epoch": 2.81,
3737
+ "learning_rate": 1.6463074886815644e-07,
3738
+ "loss": 0.0999,
3739
+ "step": 619
3740
+ },
3741
+ {
3742
+ "epoch": 2.82,
3743
+ "learning_rate": 1.5610230107771518e-07,
3744
+ "loss": 0.0964,
3745
+ "step": 620
3746
+ },
3747
+ {
3748
+ "epoch": 2.82,
3749
+ "learning_rate": 1.4779893243939358e-07,
3750
+ "loss": 0.0894,
3751
+ "step": 621
3752
+ },
3753
+ {
3754
+ "epoch": 2.83,
3755
+ "learning_rate": 1.397208328083921e-07,
3756
+ "loss": 0.0926,
3757
+ "step": 622
3758
+ },
3759
+ {
3760
+ "epoch": 2.83,
3761
+ "learning_rate": 1.3186818688916803e-07,
3762
+ "loss": 0.1041,
3763
+ "step": 623
3764
+ },
3765
+ {
3766
+ "epoch": 2.84,
3767
+ "learning_rate": 1.242411742312233e-07,
3768
+ "loss": 0.08,
3769
+ "step": 624
3770
+ },
3771
+ {
3772
+ "epoch": 2.84,
3773
+ "learning_rate": 1.168399692249833e-07,
3774
+ "loss": 0.0925,
3775
+ "step": 625
3776
+ },
3777
+ {
3778
+ "epoch": 2.85,
3779
+ "learning_rate": 1.0966474109782354e-07,
3780
+ "loss": 0.0745,
3781
+ "step": 626
3782
+ },
3783
+ {
3784
+ "epoch": 2.85,
3785
+ "learning_rate": 1.0271565391018922e-07,
3786
+ "loss": 0.0791,
3787
+ "step": 627
3788
+ },
3789
+ {
3790
+ "epoch": 2.86,
3791
+ "learning_rate": 9.599286655185502e-08,
3792
+ "loss": 0.0964,
3793
+ "step": 628
3794
+ },
3795
+ {
3796
+ "epoch": 2.86,
3797
+ "learning_rate": 8.949653273827907e-08,
3798
+ "loss": 0.0654,
3799
+ "step": 629
3800
+ },
3801
+ {
3802
+ "epoch": 2.86,
3803
+ "learning_rate": 8.322680100710023e-08,
3804
+ "loss": 0.0887,
3805
+ "step": 630
3806
+ },
3807
+ {
3808
+ "epoch": 2.87,
3809
+ "learning_rate": 7.718381471473524e-08,
3810
+ "loss": 0.0951,
3811
+ "step": 631
3812
+ },
3813
+ {
3814
+ "epoch": 2.87,
3815
+ "learning_rate": 7.136771203310244e-08,
3816
+ "loss": 0.0881,
3817
+ "step": 632
3818
+ },
3819
+ {
3820
+ "epoch": 2.88,
3821
+ "learning_rate": 6.577862594646323e-08,
3822
+ "loss": 0.0658,
3823
+ "step": 633
3824
+ },
3825
+ {
3826
+ "epoch": 2.88,
3827
+ "learning_rate": 6.041668424837888e-08,
3828
+ "loss": 0.096,
3829
+ "step": 634
3830
+ },
3831
+ {
3832
+ "epoch": 2.89,
3833
+ "learning_rate": 5.5282009538794036e-08,
3834
+ "loss": 0.0998,
3835
+ "step": 635
3836
+ },
3837
+ {
3838
+ "epoch": 2.89,
3839
+ "learning_rate": 5.037471922122561e-08,
3840
+ "loss": 0.087,
3841
+ "step": 636
3842
+ },
3843
+ {
3844
+ "epoch": 2.9,
3845
+ "learning_rate": 4.569492550008603e-08,
3846
+ "loss": 0.0887,
3847
+ "step": 637
3848
+ },
3849
+ {
3850
+ "epoch": 2.9,
3851
+ "learning_rate": 4.1242735378111966e-08,
3852
+ "loss": 0.0916,
3853
+ "step": 638
3854
+ },
3855
+ {
3856
+ "epoch": 2.91,
3857
+ "learning_rate": 3.701825065392184e-08,
3858
+ "loss": 0.09,
3859
+ "step": 639
3860
+ },
3861
+ {
3862
+ "epoch": 2.91,
3863
+ "learning_rate": 3.3021567919686583e-08,
3864
+ "loss": 0.0741,
3865
+ "step": 640
3866
+ },
3867
+ {
3868
+ "epoch": 2.91,
3869
+ "learning_rate": 2.925277855891695e-08,
3870
+ "loss": 0.1025,
3871
+ "step": 641
3872
+ },
3873
+ {
3874
+ "epoch": 2.92,
3875
+ "learning_rate": 2.5711968744382975e-08,
3876
+ "loss": 0.0833,
3877
+ "step": 642
3878
+ },
3879
+ {
3880
+ "epoch": 2.92,
3881
+ "learning_rate": 2.2399219436137765e-08,
3882
+ "loss": 0.093,
3883
+ "step": 643
3884
+ },
3885
+ {
3886
+ "epoch": 2.93,
3887
+ "learning_rate": 1.9314606379664537e-08,
3888
+ "loss": 0.0936,
3889
+ "step": 644
3890
+ },
3891
+ {
3892
+ "epoch": 2.93,
3893
+ "learning_rate": 1.6458200104149115e-08,
3894
+ "loss": 0.081,
3895
+ "step": 645
3896
+ },
3897
+ {
3898
+ "epoch": 2.94,
3899
+ "learning_rate": 1.3830065920867886e-08,
3900
+ "loss": 0.0901,
3901
+ "step": 646
3902
+ },
3903
+ {
3904
+ "epoch": 2.94,
3905
+ "learning_rate": 1.143026392168789e-08,
3906
+ "loss": 0.0878,
3907
+ "step": 647
3908
+ },
3909
+ {
3910
+ "epoch": 2.95,
3911
+ "learning_rate": 9.25884897770013e-09,
3912
+ "loss": 0.0743,
3913
+ "step": 648
3914
+ },
3915
+ {
3916
+ "epoch": 2.95,
3917
+ "learning_rate": 7.315870737961694e-09,
3918
+ "loss": 0.0736,
3919
+ "step": 649
3920
+ },
3921
+ {
3922
+ "epoch": 2.96,
3923
+ "learning_rate": 5.6013736283611065e-09,
3924
+ "loss": 0.0815,
3925
+ "step": 650
3926
+ },
3927
+ {
3928
+ "epoch": 2.96,
3929
+ "learning_rate": 4.11539685059914e-09,
3930
+ "loss": 0.089,
3931
+ "step": 651
3932
+ },
3933
+ {
3934
+ "epoch": 2.96,
3935
+ "learning_rate": 2.8579743813006434e-09,
3936
+ "loss": 0.0774,
3937
+ "step": 652
3938
+ },
3939
+ {
3940
+ "epoch": 2.97,
3941
+ "learning_rate": 1.8291349712273864e-09,
3942
+ "loss": 0.0751,
3943
+ "step": 653
3944
+ },
3945
+ {
3946
+ "epoch": 2.97,
3947
+ "learning_rate": 1.0289021446308057e-09,
3948
+ "loss": 0.0823,
3949
+ "step": 654
3950
+ },
3951
+ {
3952
+ "epoch": 2.98,
3953
+ "learning_rate": 4.5729419870577106e-10,
3954
+ "loss": 0.1049,
3955
+ "step": 655
3956
+ },
3957
+ {
3958
+ "epoch": 2.98,
3959
+ "learning_rate": 1.1432420317758486e-10,
3960
+ "loss": 0.0864,
3961
+ "step": 656
3962
+ },
3963
+ {
3964
+ "epoch": 2.99,
3965
+ "learning_rate": 0.0,
3966
+ "loss": 0.0795,
3967
+ "step": 657
3968
+ },
3969
+ {
3970
+ "epoch": 2.99,
3971
+ "eval_loss": 0.09112608432769775,
3972
+ "eval_runtime": 1063.7673,
3973
+ "eval_samples_per_second": 4.412,
3974
+ "eval_steps_per_second": 2.206,
3975
+ "step": 657
3976
+ },
3977
+ {
3978
+ "epoch": 2.99,
3979
+ "step": 657,
3980
+ "total_flos": 3.0284260385446953e+18,
3981
+ "train_loss": 0.11520915337848155,
3982
+ "train_runtime": 83607.727,
3983
+ "train_samples_per_second": 1.515,
3984
+ "train_steps_per_second": 0.008
3985
+ }
3986
+ ],
3987
+ "logging_steps": 1,
3988
+ "max_steps": 657,
3989
+ "num_train_epochs": 3,
3990
+ "save_steps": 500,
3991
+ "total_flos": 3.0284260385446953e+18,
3992
+ "trial_name": null,
3993
+ "trial_params": null
3994
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8aef980aecb829ec84079420898c40324a7368761341a8434865e84b3c4e8a48
3
+ size 4664
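
The long JSON log added above is the tail of what appears to be the standard 🤗 Trainer state file (conventionally `trainer_state.json`), and `training_args.bin` is the serialized training arguments. As an illustrative aside rather than part of the commit, the sketch below shows how such a log could be parsed to plot the training-loss curve; the file name, the `log_history` key, and the output path are assumptions based on the usual Trainer output layout, not something stated in this diff.

```python
# Illustrative sketch (assumptions: the file is named "trainer_state.json" and
# follows the standard Trainer layout with a "log_history" list of records like
# {"epoch": ..., "learning_rate": ..., "loss": ..., "step": ...}).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed file name
    state = json.load(f)

# Keep only per-step training records; evaluation records carry "eval_loss"
# (logged here at steps 439 and 657) and are skipped.
train_logs = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in train_logs]
losses = [r["loss"] for r in train_logs]

plt.plot(steps, losses, linewidth=1)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("zephyr-7b-sft-lora training loss")
plt.savefig("loss_curve.png", dpi=150)  # assumed output path
```

Run from the repository root, this would produce a single loss-versus-step plot covering all 657 logged optimizer steps.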