wzhouad committed
Commit 83582ac (parent df30c76)

Model save

README.md CHANGED
@@ -1,4 +1,6 @@
 ---
+license: mit
+base_model: HuggingFaceH4/mistral-7b-sft-beta
 tags:
 - trl
 - dpo
@@ -13,17 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 # zephyr-7b-dpo-full
 
-This model was trained from scratch on the None dataset.
-It achieves the following results on the evaluation set:
-- Loss: 0.0449
-- Rewards/chosen: -0.6529
-- Rewards/rejected: -1.2907
-- Rewards/accuracies: 0.7383
-- Rewards/margins: 0.6378
-- Logps/rejected: -518.3444
-- Logps/chosen: -448.4197
-- Logits/rejected: 1.6154
-- Logits/chosen: 1.4915
+This model is a fine-tuned version of [HuggingFaceH4/mistral-7b-sft-beta](https://huggingface.co/HuggingFaceH4/mistral-7b-sft-beta) on the None dataset.
 
 ## Model description
 
@@ -43,12 +35,12 @@ More information needed
 
 The following hyperparameters were used during training:
 - learning_rate: 5e-07
-- train_batch_size: 4
+- train_batch_size: 8
 - eval_batch_size: 8
-- seed: 2
+- seed: 4
 - distributed_type: multi-GPU
 - num_devices: 8
-- gradient_accumulation_steps: 4
+- gradient_accumulation_steps: 2
 - total_train_batch_size: 128
 - total_eval_batch_size: 64
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
@@ -58,12 +50,6 @@ The following hyperparameters were used during training:
 
 ### Training results
 
-| Training Loss | Epoch | Step | Validation Loss | Rewards/chosen | Rewards/rejected | Rewards/accuracies | Rewards/margins | Logps/rejected | Logps/chosen | Logits/rejected | Logits/chosen |
-|:-------------:|:-----:|:----:|:---------------:|:--------------:|:----------------:|:------------------:|:---------------:|:--------------:|:------------:|:---------------:|:-------------:|
-| 0.09          | 0.21  | 100  | 0.0844          | -0.0945        | -0.2957          | 0.6992             | 0.2012          | -418.8424      | -392.5748    | 0.5303          | 0.4926        |
-| 0.0405        | 0.42  | 200  | 0.0452          | -0.6625        | -1.1657          | 0.7539             | 0.5032          | -505.8466      | -449.3789    | 1.2734          | 1.1859        |
-| 0.0479        | 0.63  | 300  | 0.0477          | -0.5208        | -1.1204          | 0.7383             | 0.5995          | -501.3084      | -435.2133    | 1.4093          | 1.2982        |
-| 0.0457        | 0.84  | 400  | 0.0449          | -0.6529        | -1.2907          | 0.7383             | 0.6378          | -518.3444      | -448.4197    | 1.6154          | 1.4915        |
 
 
 ### Framework versions

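The hyperparameter edits above are self-consistent: the per-device batch size doubled (4 to 8) while gradient accumulation halved (4 to 2), so across 8 GPUs the effective batch size stays at the listed 128. A quick sketch of that arithmetic, assuming the standard per_device x num_devices x accumulation convention used by accelerate/Trainer:

# Effective train batch size implied by the README hyperparameters.
def total_train_batch_size(per_device: int, num_devices: int, grad_accum: int) -> int:
    return per_device * num_devices * grad_accum

old = total_train_batch_size(per_device=4, num_devices=8, grad_accum=4)
new = total_train_batch_size(per_device=8, num_devices=8, grad_accum=2)
assert old == new == 128  # matches "total_train_batch_size: 128" on both sides
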
all_results.json CHANGED
@@ -1,8 +1,8 @@
 {
     "epoch": 1.0,
-    "train_loss": 0.0564979040210352,
-    "train_runtime": 4410.0999,
-    "train_samples": 61134,
-    "train_samples_per_second": 13.862,
-    "train_steps_per_second": 0.108
+    "train_loss": 0.08422429972704783,
+    "train_runtime": 6988.5729,
+    "train_samples": 113028,
+    "train_samples_per_second": 16.173,
+    "train_steps_per_second": 0.126
 }

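The updated metrics are mutually consistent: dividing train_samples by train_runtime reproduces the logged samples-per-second, and assuming the effective batch size of 128 from the README reproduces steps-per-second as well. A small check:

# Sanity-check the new all_results.json figures against each other.
train_samples = 113028
train_runtime = 6988.5729      # seconds
total_batch_size = 128         # assumed from the README hyperparameters

print(round(train_samples / train_runtime, 3))                     # 16.173
print(round(train_samples / total_batch_size / train_runtime, 3))  # 0.126
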
config.json CHANGED
@@ -1,28 +1,25 @@
 {
-  "_name_or_path": "/share5/projects/llm/finetune-accelerate/weight/Meta-Llama-3-8B-Instruct",
+  "_name_or_path": "HuggingFaceH4/mistral-7b-sft-beta",
   "architectures": [
-    "LlamaForCausalLM"
+    "MistralForCausalLM"
   ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 8192,
-  "model_type": "llama",
+  "max_position_embeddings": 32768,
+  "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 500000.0,
+  "rope_theta": 10000.0,
+  "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.35.2",
   "use_cache": false,
-  "vocab_size": 128256
+  "vocab_size": 32000
 }

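The base-model swap is visible end to end here: Llama-3 token ids and shape parameters are replaced by Mistral-7B's. A hedged sketch of loading the saved config with transformers and checking exactly the fields this commit changed; "." stands for a hypothetical local checkout of the repo:

# Inspect the config saved in this commit (path is hypothetical).
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")
assert cfg.model_type == "mistral"
assert (cfg.bos_token_id, cfg.eos_token_id) == (1, 2)
assert cfg.max_position_embeddings == 32768 and cfg.sliding_window == 4096
assert cfg.vocab_size == 32000
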
generation_config.json CHANGED
@@ -1,9 +1,6 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": [
-    128001,
-    128009
-  ],
+  "bos_token_id": 1,
+  "eos_token_id": 2,
   "transformers_version": "4.35.2"
 }

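The generation defaults follow the same token-id change. Loading them back, again from a hypothetical local checkout:

# Generation defaults written by this commit (path is hypothetical).
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(".")
print(gen.bos_token_id, gen.eos_token_id)  # 1 2, per generation_config.json
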
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:986c5547392df0f9311e5f3214dbe87c6a46cebc31fabcf4560f2e13a5a0b1fa
+oid sha256:830483ca7b1a7d164193ef40941fb8c1185bcbd9aea0f326fe666d87fc4f85db
 size 4943162336

model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bed3327fd3218a867809de5b2c4b29db57c5f3adff700b46f9c0fd8d8861a9ce
+oid sha256:49d1a3687da7a34f3a85bae6a2d6147ff1dc272d2dc3e7a2cf612e1866a1d34e
 size 4999819336

model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:510cf41bcf8e3c7d8892a36da515a942da1383c93e7a4b08ad5d2b2bd37f1e72
+oid sha256:52e4d6b9a4cffb1d45574446b3bc4d5dbaee85c169802d75ec580a536037b327
 size 4540516344

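Only the LFS object ids change; each shard keeps its exact byte size, consistent with rewriting the same tensors. A sketch of verifying a downloaded shard against the sha256 oid recorded in its pointer (filename taken from the first pointer above; the local path is an assumption):

# Check a downloaded shard against its git-lfs pointer oid.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

expected = "830483ca7b1a7d164193ef40941fb8c1185bcbd9aea0f326fe666d87fc4f85db"
assert sha256_of("model-00001-of-00003.safetensors") == expected
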
model.safetensors.index.json CHANGED
@@ -1,298 +1,298 @@
 {
   "metadata": {
-    "total_size": 16060522496
+    "total_size": 14483464192
   },
   "weight_map": {
-    "lm_head.weight": "model-00004-of-00004.safetensors",
-    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.input_layernorm.weight": "model-00004-of-00004.safetensors",
-    "model.layers.31.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
-    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
-    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
-    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
-    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
-    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.norm.weight": "model-00004-of-00004.safetensors"
+    "lm_head.weight": "model-00003-of-00003.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+    "model.norm.weight": "model-00003-of-00003.safetensors"
   }
 }

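The rewritten index is the map loaders use to locate each tensor's shard; its total_size of 14,483,464,192 bytes at 2 bytes per bfloat16 weight works out to about 7.24B parameters, as expected for a Mistral-7B checkpoint. A minimal lookup sketch:

# Resolve tensor names to shard files, as transformers does when
# loading a sharded safetensors checkpoint.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"] / 2 / 1e9)         # ~7.24 (billions of bf16 params)
print(index["weight_map"]["model.embed_tokens.weight"])  # model-00001-of-00003.safetensors
print(index["weight_map"]["lm_head.weight"])             # model-00003-of-00003.safetensors
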
special_tokens_map.json CHANGED
@@ -1,17 +1,35 @@
 {
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
   "bos_token": {
-    "content": "<|begin_of_text|>",
+    "content": "<s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_text|>",
+    "content": "</s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|end_of_text|>"
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }

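The special-token map now uses the Mistral/SentencePiece tokens, with </s> doubling as the pad token. A spot check from a hypothetical local checkout of the repo:

# Special tokens saved by this commit (path is hypothetical).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)
# per special_tokens_map.json: <s> </s> </s> <unk>
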
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,2047 +1,23 @@
 {
   "added_tokens_decoder": {
-    "128000": {
-      "content": "<|begin_of_text|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "128001": {
-      "content": "<|end_of_text|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "128002": {
-      "content": "<|reserved_special_token_0|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128003": {
-      "content": "<|reserved_special_token_1|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128004": {
-      "content": "<|reserved_special_token_2|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128005": {
-      "content": "<|reserved_special_token_3|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128006": {
-      "content": "<|start_header_id|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128007": {
-      "content": "<|end_header_id|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128008": {
-      "content": "<|reserved_special_token_4|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128009": {
-      "content": "<|eot_id|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128010": {
-      "content": "<|reserved_special_token_5|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128011": {
-      "content": "<|reserved_special_token_6|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128012": {
-      "content": "<|reserved_special_token_7|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128013": {
-      "content": "<|reserved_special_token_8|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128014": {
-      "content": "<|reserved_special_token_9|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128015": {
-      "content": "<|reserved_special_token_10|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128016": {
-      "content": "<|reserved_special_token_11|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128017": {
-      "content": "<|reserved_special_token_12|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128018": {
-      "content": "<|reserved_special_token_13|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128019": {
-      "content": "<|reserved_special_token_14|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
161
- "special": true
162
- },
163
- "128020": {
164
- "content": "<|reserved_special_token_15|>",
165
- "lstrip": false,
166
- "normalized": false,
167
- "rstrip": false,
168
- "single_word": false,
169
- "special": true
170
- },
171
- "128021": {
172
- "content": "<|reserved_special_token_16|>",
173
- "lstrip": false,
174
- "normalized": false,
175
- "rstrip": false,
176
- "single_word": false,
177
- "special": true
178
- },
179
- "128022": {
180
- "content": "<|reserved_special_token_17|>",
181
- "lstrip": false,
182
- "normalized": false,
183
- "rstrip": false,
184
- "single_word": false,
185
- "special": true
186
- },
187
- "128023": {
188
- "content": "<|reserved_special_token_18|>",
189
- "lstrip": false,
190
- "normalized": false,
191
- "rstrip": false,
192
- "single_word": false,
193
- "special": true
194
- },
195
- "128024": {
196
- "content": "<|reserved_special_token_19|>",
197
- "lstrip": false,
198
- "normalized": false,
199
- "rstrip": false,
200
- "single_word": false,
201
- "special": true
202
- },
203
- "128025": {
204
- "content": "<|reserved_special_token_20|>",
205
- "lstrip": false,
206
- "normalized": false,
207
- "rstrip": false,
208
- "single_word": false,
209
- "special": true
210
- },
211
- "128026": {
212
- "content": "<|reserved_special_token_21|>",
213
- "lstrip": false,
214
- "normalized": false,
215
- "rstrip": false,
216
- "single_word": false,
217
- "special": true
218
- },
219
- "128027": {
220
- "content": "<|reserved_special_token_22|>",
221
- "lstrip": false,
222
- "normalized": false,
223
- "rstrip": false,
224
- "single_word": false,
225
- "special": true
226
- },
227
- "128028": {
228
- "content": "<|reserved_special_token_23|>",
229
- "lstrip": false,
230
- "normalized": false,
231
- "rstrip": false,
232
- "single_word": false,
233
- "special": true
234
- },
235
- "128029": {
236
- "content": "<|reserved_special_token_24|>",
237
- "lstrip": false,
238
- "normalized": false,
239
- "rstrip": false,
240
- "single_word": false,
241
- "special": true
242
- },
243
- "128030": {
244
- "content": "<|reserved_special_token_25|>",
245
- "lstrip": false,
246
- "normalized": false,
247
- "rstrip": false,
248
- "single_word": false,
249
- "special": true
250
- },
251
- "128031": {
252
- "content": "<|reserved_special_token_26|>",
253
- "lstrip": false,
254
- "normalized": false,
255
- "rstrip": false,
256
- "single_word": false,
257
- "special": true
258
- },
259
- "128032": {
260
- "content": "<|reserved_special_token_27|>",
261
- "lstrip": false,
262
- "normalized": false,
263
- "rstrip": false,
264
- "single_word": false,
265
- "special": true
266
- },
267
- "128033": {
268
- "content": "<|reserved_special_token_28|>",
269
- "lstrip": false,
270
- "normalized": false,
271
- "rstrip": false,
272
- "single_word": false,
273
- "special": true
274
- },
275
- "128034": {
276
- "content": "<|reserved_special_token_29|>",
277
- "lstrip": false,
278
- "normalized": false,
279
- "rstrip": false,
280
- "single_word": false,
281
- "special": true
282
- },
283
- "128035": {
284
- "content": "<|reserved_special_token_30|>",
285
- "lstrip": false,
286
- "normalized": false,
287
- "rstrip": false,
288
- "single_word": false,
289
- "special": true
290
- },
291
- "128036": {
292
- "content": "<|reserved_special_token_31|>",
293
- "lstrip": false,
294
- "normalized": false,
295
- "rstrip": false,
296
- "single_word": false,
297
- "special": true
298
- },
299
- "128037": {
300
- "content": "<|reserved_special_token_32|>",
301
- "lstrip": false,
302
- "normalized": false,
303
- "rstrip": false,
304
- "single_word": false,
305
- "special": true
306
- },
307
- "128038": {
308
- "content": "<|reserved_special_token_33|>",
309
- "lstrip": false,
310
- "normalized": false,
311
- "rstrip": false,
312
- "single_word": false,
313
- "special": true
314
- },
315
- "128039": {
316
- "content": "<|reserved_special_token_34|>",
317
- "lstrip": false,
318
- "normalized": false,
319
- "rstrip": false,
320
- "single_word": false,
321
- "special": true
322
- },
323
- "128040": {
324
- "content": "<|reserved_special_token_35|>",
325
- "lstrip": false,
326
- "normalized": false,
327
- "rstrip": false,
328
- "single_word": false,
329
- "special": true
330
- },
331
- "128041": {
332
- "content": "<|reserved_special_token_36|>",
333
- "lstrip": false,
334
- "normalized": false,
335
- "rstrip": false,
336
- "single_word": false,
337
- "special": true
338
- },
339
- "128042": {
340
- "content": "<|reserved_special_token_37|>",
341
- "lstrip": false,
342
- "normalized": false,
343
- "rstrip": false,
344
- "single_word": false,
345
- "special": true
346
- },
347
- "128043": {
348
- "content": "<|reserved_special_token_38|>",
349
- "lstrip": false,
350
- "normalized": false,
351
- "rstrip": false,
352
- "single_word": false,
353
- "special": true
354
- },
355
- "128044": {
356
- "content": "<|reserved_special_token_39|>",
357
- "lstrip": false,
358
- "normalized": false,
359
- "rstrip": false,
360
- "single_word": false,
361
- "special": true
362
- },
363
- "128045": {
364
- "content": "<|reserved_special_token_40|>",
365
- "lstrip": false,
366
- "normalized": false,
367
- "rstrip": false,
368
- "single_word": false,
369
- "special": true
370
- },
371
- "128046": {
372
- "content": "<|reserved_special_token_41|>",
373
- "lstrip": false,
374
- "normalized": false,
375
- "rstrip": false,
376
- "single_word": false,
377
- "special": true
378
- },
379
- "128047": {
380
- "content": "<|reserved_special_token_42|>",
381
- "lstrip": false,
382
- "normalized": false,
383
- "rstrip": false,
384
- "single_word": false,
385
- "special": true
386
- },
387
- "128048": {
388
- "content": "<|reserved_special_token_43|>",
389
- "lstrip": false,
390
- "normalized": false,
391
- "rstrip": false,
392
- "single_word": false,
393
- "special": true
394
- },
395
- "128049": {
396
- "content": "<|reserved_special_token_44|>",
397
- "lstrip": false,
398
- "normalized": false,
399
- "rstrip": false,
400
- "single_word": false,
401
- "special": true
402
- },
403
- "128050": {
404
- "content": "<|reserved_special_token_45|>",
405
- "lstrip": false,
406
- "normalized": false,
407
- "rstrip": false,
408
- "single_word": false,
409
- "special": true
410
- },
411
- "128051": {
412
- "content": "<|reserved_special_token_46|>",
413
- "lstrip": false,
414
- "normalized": false,
415
- "rstrip": false,
416
- "single_word": false,
417
- "special": true
418
- },
419
- "128052": {
420
- "content": "<|reserved_special_token_47|>",
421
- "lstrip": false,
422
- "normalized": false,
423
- "rstrip": false,
424
- "single_word": false,
425
- "special": true
426
- },
427
- "128053": {
428
- "content": "<|reserved_special_token_48|>",
429
- "lstrip": false,
430
- "normalized": false,
431
- "rstrip": false,
432
- "single_word": false,
433
- "special": true
434
- },
435
- "128054": {
436
- "content": "<|reserved_special_token_49|>",
437
- "lstrip": false,
438
- "normalized": false,
439
- "rstrip": false,
440
- "single_word": false,
441
- "special": true
442
- },
443
- "128055": {
444
- "content": "<|reserved_special_token_50|>",
445
- "lstrip": false,
446
- "normalized": false,
447
- "rstrip": false,
448
- "single_word": false,
449
- "special": true
450
- },
451
- "128056": {
452
- "content": "<|reserved_special_token_51|>",
453
- "lstrip": false,
454
- "normalized": false,
455
- "rstrip": false,
456
- "single_word": false,
457
- "special": true
458
- },
459
- "128057": {
460
- "content": "<|reserved_special_token_52|>",
461
- "lstrip": false,
462
- "normalized": false,
463
- "rstrip": false,
464
- "single_word": false,
465
- "special": true
466
- },
467
- "128058": {
468
- "content": "<|reserved_special_token_53|>",
469
- "lstrip": false,
470
- "normalized": false,
471
- "rstrip": false,
472
- "single_word": false,
473
- "special": true
474
- },
475
- "128059": {
476
- "content": "<|reserved_special_token_54|>",
477
- "lstrip": false,
478
- "normalized": false,
479
- "rstrip": false,
480
- "single_word": false,
481
- "special": true
482
- },
483
- "128060": {
484
- "content": "<|reserved_special_token_55|>",
485
- "lstrip": false,
486
- "normalized": false,
487
- "rstrip": false,
488
- "single_word": false,
489
- "special": true
490
- },
491
- "128061": {
492
- "content": "<|reserved_special_token_56|>",
493
- "lstrip": false,
494
- "normalized": false,
495
- "rstrip": false,
496
- "single_word": false,
497
- "special": true
498
- },
499
- "128062": {
500
- "content": "<|reserved_special_token_57|>",
501
- "lstrip": false,
502
- "normalized": false,
503
- "rstrip": false,
504
- "single_word": false,
505
- "special": true
506
- },
507
- "128063": {
508
- "content": "<|reserved_special_token_58|>",
509
- "lstrip": false,
510
- "normalized": false,
511
- "rstrip": false,
512
- "single_word": false,
513
- "special": true
514
- },
515
- "128064": {
516
- "content": "<|reserved_special_token_59|>",
517
- "lstrip": false,
518
- "normalized": false,
519
- "rstrip": false,
520
- "single_word": false,
521
- "special": true
522
- },
523
- "128065": {
524
- "content": "<|reserved_special_token_60|>",
525
- "lstrip": false,
526
- "normalized": false,
527
- "rstrip": false,
528
- "single_word": false,
529
- "special": true
530
- },
531
- "128066": {
532
- "content": "<|reserved_special_token_61|>",
533
- "lstrip": false,
534
- "normalized": false,
535
- "rstrip": false,
536
- "single_word": false,
537
- "special": true
538
- },
539
- "128067": {
540
- "content": "<|reserved_special_token_62|>",
541
- "lstrip": false,
542
- "normalized": false,
543
- "rstrip": false,
544
- "single_word": false,
545
- "special": true
546
- },
547
- "128068": {
548
- "content": "<|reserved_special_token_63|>",
549
- "lstrip": false,
550
- "normalized": false,
551
- "rstrip": false,
552
- "single_word": false,
553
- "special": true
554
- },
555
- "128069": {
556
- "content": "<|reserved_special_token_64|>",
557
- "lstrip": false,
558
- "normalized": false,
559
- "rstrip": false,
560
- "single_word": false,
561
- "special": true
562
- },
563
- "128070": {
564
- "content": "<|reserved_special_token_65|>",
565
- "lstrip": false,
566
- "normalized": false,
567
- "rstrip": false,
568
- "single_word": false,
569
- "special": true
570
- },
571
- "128071": {
572
- "content": "<|reserved_special_token_66|>",
573
- "lstrip": false,
574
- "normalized": false,
575
- "rstrip": false,
576
- "single_word": false,
577
- "special": true
578
- },
579
- "128072": {
580
- "content": "<|reserved_special_token_67|>",
581
- "lstrip": false,
582
- "normalized": false,
583
- "rstrip": false,
584
- "single_word": false,
585
- "special": true
586
- },
587
- "128073": {
588
- "content": "<|reserved_special_token_68|>",
589
- "lstrip": false,
590
- "normalized": false,
591
- "rstrip": false,
592
- "single_word": false,
593
- "special": true
594
- },
595
- "128074": {
596
- "content": "<|reserved_special_token_69|>",
597
- "lstrip": false,
598
- "normalized": false,
599
- "rstrip": false,
600
- "single_word": false,
601
- "special": true
602
- },
603
- "128075": {
604
- "content": "<|reserved_special_token_70|>",
605
- "lstrip": false,
606
- "normalized": false,
607
- "rstrip": false,
608
- "single_word": false,
609
- "special": true
610
- },
611
- "128076": {
612
- "content": "<|reserved_special_token_71|>",
613
- "lstrip": false,
614
- "normalized": false,
615
- "rstrip": false,
616
- "single_word": false,
617
- "special": true
618
- },
619
- "128077": {
620
- "content": "<|reserved_special_token_72|>",
621
- "lstrip": false,
622
- "normalized": false,
623
- "rstrip": false,
624
- "single_word": false,
625
- "special": true
626
- },
627
- "128078": {
628
- "content": "<|reserved_special_token_73|>",
629
- "lstrip": false,
630
- "normalized": false,
631
- "rstrip": false,
632
- "single_word": false,
633
- "special": true
634
- },
635
- "128079": {
636
- "content": "<|reserved_special_token_74|>",
637
- "lstrip": false,
638
- "normalized": false,
639
- "rstrip": false,
640
- "single_word": false,
641
- "special": true
642
- },
643
- "128080": {
644
- "content": "<|reserved_special_token_75|>",
645
- "lstrip": false,
646
- "normalized": false,
647
- "rstrip": false,
648
- "single_word": false,
649
- "special": true
650
- },
651
- "128081": {
652
- "content": "<|reserved_special_token_76|>",
653
- "lstrip": false,
654
- "normalized": false,
655
- "rstrip": false,
656
- "single_word": false,
657
- "special": true
658
- },
659
- "128082": {
660
- "content": "<|reserved_special_token_77|>",
661
- "lstrip": false,
662
- "normalized": false,
663
- "rstrip": false,
664
- "single_word": false,
665
- "special": true
666
- },
667
- "128083": {
668
- "content": "<|reserved_special_token_78|>",
669
- "lstrip": false,
670
- "normalized": false,
671
- "rstrip": false,
672
- "single_word": false,
673
- "special": true
674
- },
675
- "128084": {
676
- "content": "<|reserved_special_token_79|>",
677
- "lstrip": false,
678
- "normalized": false,
679
- "rstrip": false,
680
- "single_word": false,
681
- "special": true
682
- },
683
- "128085": {
684
- "content": "<|reserved_special_token_80|>",
685
- "lstrip": false,
686
- "normalized": false,
687
- "rstrip": false,
688
- "single_word": false,
689
- "special": true
690
- },
691
- "128086": {
692
- "content": "<|reserved_special_token_81|>",
693
- "lstrip": false,
694
- "normalized": false,
695
- "rstrip": false,
696
- "single_word": false,
697
- "special": true
698
- },
699
- "128087": {
700
- "content": "<|reserved_special_token_82|>",
701
- "lstrip": false,
702
- "normalized": false,
703
- "rstrip": false,
704
- "single_word": false,
705
- "special": true
706
- },
707
- "128088": {
708
- "content": "<|reserved_special_token_83|>",
709
- "lstrip": false,
710
- "normalized": false,
711
- "rstrip": false,
712
- "single_word": false,
713
- "special": true
714
- },
715
- "128089": {
716
- "content": "<|reserved_special_token_84|>",
717
- "lstrip": false,
718
- "normalized": false,
719
- "rstrip": false,
720
- "single_word": false,
721
- "special": true
722
- },
723
- "128090": {
724
- "content": "<|reserved_special_token_85|>",
725
- "lstrip": false,
726
- "normalized": false,
727
- "rstrip": false,
728
- "single_word": false,
729
- "special": true
730
- },
731
- "128091": {
732
- "content": "<|reserved_special_token_86|>",
733
- "lstrip": false,
734
- "normalized": false,
735
- "rstrip": false,
736
- "single_word": false,
737
- "special": true
738
- },
739
- "128092": {
740
- "content": "<|reserved_special_token_87|>",
741
- "lstrip": false,
742
- "normalized": false,
743
- "rstrip": false,
744
- "single_word": false,
745
- "special": true
746
- },
747
- "128093": {
748
- "content": "<|reserved_special_token_88|>",
749
- "lstrip": false,
750
- "normalized": false,
751
- "rstrip": false,
752
- "single_word": false,
753
- "special": true
754
- },
755
- "128094": {
756
- "content": "<|reserved_special_token_89|>",
757
- "lstrip": false,
758
- "normalized": false,
759
- "rstrip": false,
760
- "single_word": false,
761
- "special": true
762
- },
763
- "128095": {
764
- "content": "<|reserved_special_token_90|>",
765
- "lstrip": false,
766
- "normalized": false,
767
- "rstrip": false,
768
- "single_word": false,
769
- "special": true
770
- },
771
- "128096": {
772
- "content": "<|reserved_special_token_91|>",
773
- "lstrip": false,
774
- "normalized": false,
775
- "rstrip": false,
776
- "single_word": false,
777
- "special": true
778
- },
779
- "128097": {
780
- "content": "<|reserved_special_token_92|>",
781
- "lstrip": false,
782
- "normalized": false,
783
- "rstrip": false,
784
- "single_word": false,
785
- "special": true
786
- },
787
- "128098": {
788
- "content": "<|reserved_special_token_93|>",
789
- "lstrip": false,
790
- "normalized": false,
791
- "rstrip": false,
792
- "single_word": false,
793
- "special": true
794
- },
795
- "128099": {
796
- "content": "<|reserved_special_token_94|>",
797
- "lstrip": false,
798
- "normalized": false,
799
- "rstrip": false,
800
- "single_word": false,
801
- "special": true
802
- },
803
- "128100": {
804
- "content": "<|reserved_special_token_95|>",
805
- "lstrip": false,
806
- "normalized": false,
807
- "rstrip": false,
808
- "single_word": false,
809
- "special": true
810
- },
811
- "128101": {
812
- "content": "<|reserved_special_token_96|>",
813
- "lstrip": false,
814
- "normalized": false,
815
- "rstrip": false,
816
- "single_word": false,
817
- "special": true
818
- },
819
- "128102": {
820
- "content": "<|reserved_special_token_97|>",
821
- "lstrip": false,
822
- "normalized": false,
823
- "rstrip": false,
824
- "single_word": false,
825
- "special": true
826
- },
827
- "128103": {
828
- "content": "<|reserved_special_token_98|>",
829
- "lstrip": false,
830
- "normalized": false,
831
- "rstrip": false,
832
- "single_word": false,
833
- "special": true
834
- },
835
- "128104": {
836
- "content": "<|reserved_special_token_99|>",
837
- "lstrip": false,
838
- "normalized": false,
839
- "rstrip": false,
840
- "single_word": false,
841
- "special": true
842
- },
843
- "128105": {
844
- "content": "<|reserved_special_token_100|>",
845
- "lstrip": false,
846
- "normalized": false,
847
- "rstrip": false,
848
- "single_word": false,
849
- "special": true
850
- },
851
- "128106": {
852
- "content": "<|reserved_special_token_101|>",
853
- "lstrip": false,
854
- "normalized": false,
855
- "rstrip": false,
856
- "single_word": false,
857
- "special": true
858
- },
859
- "128107": {
860
- "content": "<|reserved_special_token_102|>",
861
- "lstrip": false,
862
- "normalized": false,
863
- "rstrip": false,
864
- "single_word": false,
865
- "special": true
866
- },
867
- "128108": {
868
- "content": "<|reserved_special_token_103|>",
869
- "lstrip": false,
870
- "normalized": false,
871
- "rstrip": false,
872
- "single_word": false,
873
- "special": true
874
- },
875
- "128109": {
876
- "content": "<|reserved_special_token_104|>",
877
- "lstrip": false,
878
- "normalized": false,
879
- "rstrip": false,
880
- "single_word": false,
881
- "special": true
882
- },
883
- "128110": {
884
- "content": "<|reserved_special_token_105|>",
885
- "lstrip": false,
886
- "normalized": false,
887
- "rstrip": false,
888
- "single_word": false,
889
- "special": true
890
- },
891
- "128111": {
892
- "content": "<|reserved_special_token_106|>",
893
- "lstrip": false,
894
- "normalized": false,
895
- "rstrip": false,
896
- "single_word": false,
897
- "special": true
898
- },
899
- "128112": {
900
- "content": "<|reserved_special_token_107|>",
901
- "lstrip": false,
902
- "normalized": false,
903
- "rstrip": false,
904
- "single_word": false,
905
- "special": true
906
- },
907
- "128113": {
908
- "content": "<|reserved_special_token_108|>",
909
- "lstrip": false,
910
- "normalized": false,
911
- "rstrip": false,
912
- "single_word": false,
913
- "special": true
914
- },
915
- "128114": {
916
- "content": "<|reserved_special_token_109|>",
917
- "lstrip": false,
918
- "normalized": false,
919
- "rstrip": false,
920
- "single_word": false,
921
- "special": true
922
- },
923
- "128115": {
924
- "content": "<|reserved_special_token_110|>",
925
- "lstrip": false,
926
- "normalized": false,
927
- "rstrip": false,
928
- "single_word": false,
929
- "special": true
930
- },
931
- "128116": {
932
- "content": "<|reserved_special_token_111|>",
933
- "lstrip": false,
934
- "normalized": false,
935
- "rstrip": false,
936
- "single_word": false,
937
- "special": true
938
- },
939
- "128117": {
940
- "content": "<|reserved_special_token_112|>",
941
- "lstrip": false,
942
- "normalized": false,
943
- "rstrip": false,
944
- "single_word": false,
945
- "special": true
946
- },
947
- "128118": {
948
- "content": "<|reserved_special_token_113|>",
949
- "lstrip": false,
950
- "normalized": false,
951
- "rstrip": false,
952
- "single_word": false,
953
- "special": true
954
- },
955
- "128119": {
956
- "content": "<|reserved_special_token_114|>",
957
- "lstrip": false,
958
- "normalized": false,
959
- "rstrip": false,
960
- "single_word": false,
961
- "special": true
962
- },
963
- "128120": {
964
- "content": "<|reserved_special_token_115|>",
965
- "lstrip": false,
966
- "normalized": false,
967
- "rstrip": false,
968
- "single_word": false,
969
- "special": true
970
- },
971
- "128121": {
972
- "content": "<|reserved_special_token_116|>",
973
- "lstrip": false,
974
- "normalized": false,
975
- "rstrip": false,
976
- "single_word": false,
977
- "special": true
978
- },
979
- "128122": {
980
- "content": "<|reserved_special_token_117|>",
981
- "lstrip": false,
982
- "normalized": false,
983
- "rstrip": false,
984
- "single_word": false,
985
- "special": true
986
- },
987
- "128123": {
988
- "content": "<|reserved_special_token_118|>",
989
- "lstrip": false,
990
- "normalized": false,
991
- "rstrip": false,
992
- "single_word": false,
993
- "special": true
994
- },
995
- "128124": {
996
- "content": "<|reserved_special_token_119|>",
997
- "lstrip": false,
998
- "normalized": false,
999
- "rstrip": false,
1000
- "single_word": false,
1001
- "special": true
1002
- },
1003
- "128125": {
1004
- "content": "<|reserved_special_token_120|>",
1005
- "lstrip": false,
1006
- "normalized": false,
1007
- "rstrip": false,
1008
- "single_word": false,
1009
- "special": true
1010
- },
1011
- "128126": {
1012
- "content": "<|reserved_special_token_121|>",
1013
- "lstrip": false,
1014
- "normalized": false,
1015
- "rstrip": false,
1016
- "single_word": false,
1017
- "special": true
1018
- },
1019
- "128127": {
1020
- "content": "<|reserved_special_token_122|>",
1021
- "lstrip": false,
1022
- "normalized": false,
1023
- "rstrip": false,
1024
- "single_word": false,
1025
- "special": true
1026
- },
1027
- "128128": {
1028
- "content": "<|reserved_special_token_123|>",
1029
- "lstrip": false,
1030
- "normalized": false,
1031
- "rstrip": false,
1032
- "single_word": false,
1033
- "special": true
1034
- },
1035
- "128129": {
1036
- "content": "<|reserved_special_token_124|>",
1037
- "lstrip": false,
1038
- "normalized": false,
1039
- "rstrip": false,
1040
- "single_word": false,
1041
- "special": true
1042
- },
1043
- "128130": {
1044
- "content": "<|reserved_special_token_125|>",
1045
- "lstrip": false,
1046
- "normalized": false,
1047
- "rstrip": false,
1048
- "single_word": false,
1049
- "special": true
1050
- },
1051
- "128131": {
1052
- "content": "<|reserved_special_token_126|>",
1053
- "lstrip": false,
1054
- "normalized": false,
1055
- "rstrip": false,
1056
- "single_word": false,
1057
- "special": true
1058
- },
1059
- "128132": {
1060
- "content": "<|reserved_special_token_127|>",
1061
- "lstrip": false,
1062
- "normalized": false,
1063
- "rstrip": false,
1064
- "single_word": false,
1065
- "special": true
1066
- },
1067
- "128133": {
1068
- "content": "<|reserved_special_token_128|>",
1069
- "lstrip": false,
1070
- "normalized": false,
1071
- "rstrip": false,
1072
- "single_word": false,
1073
- "special": true
1074
- },
1075
- "128134": {
1076
- "content": "<|reserved_special_token_129|>",
1077
- "lstrip": false,
1078
- "normalized": false,
1079
- "rstrip": false,
1080
- "single_word": false,
1081
- "special": true
1082
- },
1083
- "128135": {
1084
- "content": "<|reserved_special_token_130|>",
1085
- "lstrip": false,
1086
- "normalized": false,
1087
- "rstrip": false,
1088
- "single_word": false,
1089
- "special": true
1090
- },
1091
- "128136": {
1092
- "content": "<|reserved_special_token_131|>",
1093
- "lstrip": false,
1094
- "normalized": false,
1095
- "rstrip": false,
1096
- "single_word": false,
1097
- "special": true
1098
- },
1099
- "128137": {
1100
- "content": "<|reserved_special_token_132|>",
1101
- "lstrip": false,
1102
- "normalized": false,
1103
- "rstrip": false,
1104
- "single_word": false,
1105
- "special": true
1106
- },
1107
- "128138": {
1108
- "content": "<|reserved_special_token_133|>",
1109
- "lstrip": false,
1110
- "normalized": false,
1111
- "rstrip": false,
1112
- "single_word": false,
1113
- "special": true
1114
- },
1115
- "128139": {
1116
- "content": "<|reserved_special_token_134|>",
1117
- "lstrip": false,
1118
- "normalized": false,
1119
- "rstrip": false,
1120
- "single_word": false,
1121
- "special": true
1122
- },
1123
- "128140": {
1124
- "content": "<|reserved_special_token_135|>",
1125
- "lstrip": false,
1126
- "normalized": false,
1127
- "rstrip": false,
1128
- "single_word": false,
1129
- "special": true
1130
- },
1131
- "128141": {
1132
- "content": "<|reserved_special_token_136|>",
1133
- "lstrip": false,
1134
- "normalized": false,
1135
- "rstrip": false,
1136
- "single_word": false,
1137
- "special": true
1138
- },
1139
- "128142": {
1140
- "content": "<|reserved_special_token_137|>",
1141
- "lstrip": false,
1142
- "normalized": false,
1143
- "rstrip": false,
1144
- "single_word": false,
1145
- "special": true
1146
- },
1147
- "128143": {
1148
- "content": "<|reserved_special_token_138|>",
1149
- "lstrip": false,
1150
- "normalized": false,
1151
- "rstrip": false,
1152
- "single_word": false,
1153
- "special": true
1154
- },
1155
- "128144": {
1156
- "content": "<|reserved_special_token_139|>",
1157
- "lstrip": false,
1158
- "normalized": false,
1159
- "rstrip": false,
1160
- "single_word": false,
1161
- "special": true
1162
- },
1163
- "128145": {
1164
- "content": "<|reserved_special_token_140|>",
1165
- "lstrip": false,
1166
- "normalized": false,
1167
- "rstrip": false,
1168
- "single_word": false,
1169
- "special": true
1170
- },
1171
- "128146": {
1172
- "content": "<|reserved_special_token_141|>",
1173
- "lstrip": false,
1174
- "normalized": false,
1175
- "rstrip": false,
1176
- "single_word": false,
1177
- "special": true
1178
- },
1179
- "128147": {
1180
- "content": "<|reserved_special_token_142|>",
1181
- "lstrip": false,
1182
- "normalized": false,
1183
- "rstrip": false,
1184
- "single_word": false,
1185
- "special": true
1186
- },
1187
- "128148": {
1188
- "content": "<|reserved_special_token_143|>",
1189
- "lstrip": false,
1190
- "normalized": false,
1191
- "rstrip": false,
1192
- "single_word": false,
1193
- "special": true
1194
- },
1195
- "128149": {
1196
- "content": "<|reserved_special_token_144|>",
1197
- "lstrip": false,
1198
- "normalized": false,
1199
- "rstrip": false,
1200
- "single_word": false,
1201
- "special": true
1202
- },
1203
- "128150": {
1204
- "content": "<|reserved_special_token_145|>",
1205
- "lstrip": false,
1206
- "normalized": false,
1207
- "rstrip": false,
1208
- "single_word": false,
1209
- "special": true
1210
- },
1211
- "128151": {
1212
- "content": "<|reserved_special_token_146|>",
1213
- "lstrip": false,
1214
- "normalized": false,
1215
- "rstrip": false,
1216
- "single_word": false,
1217
- "special": true
1218
- },
1219
- "128152": {
1220
- "content": "<|reserved_special_token_147|>",
1221
- "lstrip": false,
1222
- "normalized": false,
1223
- "rstrip": false,
1224
- "single_word": false,
1225
- "special": true
1226
- },
1227
- "128153": {
1228
- "content": "<|reserved_special_token_148|>",
1229
- "lstrip": false,
1230
- "normalized": false,
1231
- "rstrip": false,
1232
- "single_word": false,
1233
- "special": true
1234
- },
1235
- "128154": {
1236
- "content": "<|reserved_special_token_149|>",
1237
- "lstrip": false,
1238
- "normalized": false,
1239
- "rstrip": false,
1240
- "single_word": false,
1241
- "special": true
1242
- },
1243
- "128155": {
1244
- "content": "<|reserved_special_token_150|>",
1245
- "lstrip": false,
1246
- "normalized": false,
1247
- "rstrip": false,
1248
- "single_word": false,
1249
- "special": true
1250
- },
1251
- "128156": {
1252
- "content": "<|reserved_special_token_151|>",
1253
- "lstrip": false,
1254
- "normalized": false,
1255
- "rstrip": false,
1256
- "single_word": false,
1257
- "special": true
1258
- },
1259
- "128157": {
1260
- "content": "<|reserved_special_token_152|>",
1261
- "lstrip": false,
1262
- "normalized": false,
1263
- "rstrip": false,
1264
- "single_word": false,
1265
- "special": true
1266
- },
1267
- "128158": {
1268
- "content": "<|reserved_special_token_153|>",
1269
- "lstrip": false,
1270
- "normalized": false,
1271
- "rstrip": false,
1272
- "single_word": false,
1273
- "special": true
1274
- },
1275
- "128159": {
1276
- "content": "<|reserved_special_token_154|>",
1277
- "lstrip": false,
1278
- "normalized": false,
1279
- "rstrip": false,
1280
- "single_word": false,
1281
- "special": true
1282
- },
1283
- "128160": {
1284
- "content": "<|reserved_special_token_155|>",
1285
- "lstrip": false,
1286
- "normalized": false,
1287
- "rstrip": false,
1288
- "single_word": false,
1289
- "special": true
1290
- },
1291
- "128161": {
1292
- "content": "<|reserved_special_token_156|>",
1293
- "lstrip": false,
1294
- "normalized": false,
1295
- "rstrip": false,
1296
- "single_word": false,
1297
- "special": true
1298
- },
1299
- "128162": {
1300
- "content": "<|reserved_special_token_157|>",
1301
- "lstrip": false,
1302
- "normalized": false,
1303
- "rstrip": false,
1304
- "single_word": false,
1305
- "special": true
1306
- },
1307
- "128163": {
1308
- "content": "<|reserved_special_token_158|>",
1309
- "lstrip": false,
1310
- "normalized": false,
1311
- "rstrip": false,
1312
- "single_word": false,
1313
- "special": true
1314
- },
1315
- "128164": {
1316
- "content": "<|reserved_special_token_159|>",
1317
- "lstrip": false,
1318
- "normalized": false,
1319
- "rstrip": false,
1320
- "single_word": false,
1321
- "special": true
1322
- },
1323
- "128165": {
1324
- "content": "<|reserved_special_token_160|>",
1325
- "lstrip": false,
1326
- "normalized": false,
1327
- "rstrip": false,
1328
- "single_word": false,
1329
- "special": true
1330
- },
1331
- "128166": {
1332
- "content": "<|reserved_special_token_161|>",
1333
- "lstrip": false,
1334
- "normalized": false,
1335
- "rstrip": false,
1336
- "single_word": false,
1337
- "special": true
1338
- },
1339
- "128167": {
1340
- "content": "<|reserved_special_token_162|>",
1341
- "lstrip": false,
1342
- "normalized": false,
1343
- "rstrip": false,
1344
- "single_word": false,
1345
- "special": true
1346
- },
1347
- "128168": {
1348
- "content": "<|reserved_special_token_163|>",
1349
- "lstrip": false,
1350
- "normalized": false,
1351
- "rstrip": false,
1352
- "single_word": false,
1353
- "special": true
1354
- },
1355
- "128169": {
1356
- "content": "<|reserved_special_token_164|>",
1357
- "lstrip": false,
1358
- "normalized": false,
1359
- "rstrip": false,
1360
- "single_word": false,
1361
- "special": true
1362
- },
1363
- "128170": {
1364
- "content": "<|reserved_special_token_165|>",
1365
- "lstrip": false,
1366
- "normalized": false,
1367
- "rstrip": false,
1368
- "single_word": false,
1369
- "special": true
1370
- },
1371
- "128171": {
1372
- "content": "<|reserved_special_token_166|>",
1373
- "lstrip": false,
1374
- "normalized": false,
1375
- "rstrip": false,
1376
- "single_word": false,
1377
- "special": true
1378
- },
1379
- "128172": {
1380
- "content": "<|reserved_special_token_167|>",
1381
- "lstrip": false,
1382
- "normalized": false,
1383
- "rstrip": false,
1384
- "single_word": false,
1385
- "special": true
1386
- },
1387
- "128173": {
1388
- "content": "<|reserved_special_token_168|>",
1389
- "lstrip": false,
1390
- "normalized": false,
1391
- "rstrip": false,
1392
- "single_word": false,
1393
- "special": true
1394
- },
1395
- "128174": {
1396
- "content": "<|reserved_special_token_169|>",
1397
- "lstrip": false,
1398
- "normalized": false,
1399
- "rstrip": false,
1400
- "single_word": false,
1401
- "special": true
1402
- },
1403
- "128175": {
1404
- "content": "<|reserved_special_token_170|>",
1405
- "lstrip": false,
1406
- "normalized": false,
1407
- "rstrip": false,
1408
- "single_word": false,
1409
- "special": true
1410
- },
1411
- "128176": {
1412
- "content": "<|reserved_special_token_171|>",
1413
- "lstrip": false,
1414
- "normalized": false,
1415
- "rstrip": false,
1416
- "single_word": false,
1417
- "special": true
1418
- },
1419
- "128177": {
1420
- "content": "<|reserved_special_token_172|>",
1421
- "lstrip": false,
1422
- "normalized": false,
1423
- "rstrip": false,
1424
- "single_word": false,
1425
- "special": true
1426
- },
1427
- "128178": {
1428
- "content": "<|reserved_special_token_173|>",
1429
- "lstrip": false,
1430
- "normalized": false,
1431
- "rstrip": false,
1432
- "single_word": false,
1433
- "special": true
1434
- },
1435
- "128179": {
1436
- "content": "<|reserved_special_token_174|>",
1437
- "lstrip": false,
1438
- "normalized": false,
1439
- "rstrip": false,
1440
- "single_word": false,
1441
- "special": true
1442
- },
1443
- "128180": {
1444
- "content": "<|reserved_special_token_175|>",
1445
- "lstrip": false,
1446
- "normalized": false,
1447
- "rstrip": false,
1448
- "single_word": false,
1449
- "special": true
1450
- },
1451
- "128181": {
1452
- "content": "<|reserved_special_token_176|>",
1453
- "lstrip": false,
1454
- "normalized": false,
1455
- "rstrip": false,
1456
- "single_word": false,
1457
- "special": true
1458
- },
1459
- "128182": {
1460
- "content": "<|reserved_special_token_177|>",
1461
- "lstrip": false,
1462
- "normalized": false,
1463
- "rstrip": false,
1464
- "single_word": false,
1465
- "special": true
1466
- },
1467
- "128183": {
1468
- "content": "<|reserved_special_token_178|>",
1469
- "lstrip": false,
1470
- "normalized": false,
1471
- "rstrip": false,
1472
- "single_word": false,
1473
- "special": true
1474
- },
1475
- "128184": {
1476
- "content": "<|reserved_special_token_179|>",
1477
- "lstrip": false,
1478
- "normalized": false,
1479
- "rstrip": false,
1480
- "single_word": false,
1481
- "special": true
1482
- },
1483
- "128185": {
1484
- "content": "<|reserved_special_token_180|>",
1485
- "lstrip": false,
1486
- "normalized": false,
1487
- "rstrip": false,
1488
- "single_word": false,
1489
- "special": true
1490
- },
1491
- "128186": {
1492
- "content": "<|reserved_special_token_181|>",
1493
- "lstrip": false,
1494
- "normalized": false,
1495
- "rstrip": false,
1496
- "single_word": false,
1497
- "special": true
1498
- },
1499
- "128187": {
1500
- "content": "<|reserved_special_token_182|>",
1501
- "lstrip": false,
1502
- "normalized": false,
1503
- "rstrip": false,
1504
- "single_word": false,
1505
- "special": true
1506
- },
1507
- "128188": {
1508
- "content": "<|reserved_special_token_183|>",
1509
- "lstrip": false,
1510
- "normalized": false,
1511
- "rstrip": false,
1512
- "single_word": false,
1513
- "special": true
1514
- },
1515
- "128189": {
1516
- "content": "<|reserved_special_token_184|>",
1517
- "lstrip": false,
1518
- "normalized": false,
1519
- "rstrip": false,
1520
- "single_word": false,
1521
- "special": true
1522
- },
1523
- "128190": {
1524
- "content": "<|reserved_special_token_185|>",
1525
- "lstrip": false,
1526
- "normalized": false,
1527
- "rstrip": false,
1528
- "single_word": false,
1529
- "special": true
1530
- },
1531
- "128191": {
1532
- "content": "<|reserved_special_token_186|>",
1533
- "lstrip": false,
1534
- "normalized": false,
1535
- "rstrip": false,
1536
- "single_word": false,
1537
- "special": true
1538
- },
1539
- "128192": {
1540
- "content": "<|reserved_special_token_187|>",
1541
- "lstrip": false,
1542
- "normalized": false,
1543
- "rstrip": false,
1544
- "single_word": false,
1545
- "special": true
1546
- },
1547
- "128193": {
1548
- "content": "<|reserved_special_token_188|>",
1549
- "lstrip": false,
1550
- "normalized": false,
1551
- "rstrip": false,
1552
- "single_word": false,
1553
- "special": true
1554
- },
1555
- "128194": {
1556
- "content": "<|reserved_special_token_189|>",
1557
- "lstrip": false,
1558
- "normalized": false,
1559
- "rstrip": false,
1560
- "single_word": false,
1561
- "special": true
1562
- },
1563
- "128195": {
1564
- "content": "<|reserved_special_token_190|>",
1565
- "lstrip": false,
1566
- "normalized": false,
1567
- "rstrip": false,
1568
- "single_word": false,
1569
- "special": true
1570
- },
1571
- "128196": {
1572
- "content": "<|reserved_special_token_191|>",
1573
- "lstrip": false,
1574
- "normalized": false,
1575
- "rstrip": false,
1576
- "single_word": false,
1577
- "special": true
1578
- },
1579
- "128197": {
1580
- "content": "<|reserved_special_token_192|>",
1581
- "lstrip": false,
1582
- "normalized": false,
1583
- "rstrip": false,
1584
- "single_word": false,
1585
- "special": true
1586
- },
1587
- "128198": {
1588
- "content": "<|reserved_special_token_193|>",
1589
- "lstrip": false,
1590
- "normalized": false,
1591
- "rstrip": false,
1592
- "single_word": false,
1593
- "special": true
1594
- },
1595
- "128199": {
1596
- "content": "<|reserved_special_token_194|>",
1597
- "lstrip": false,
1598
- "normalized": false,
1599
- "rstrip": false,
1600
- "single_word": false,
1601
- "special": true
1602
- },
1603
- "128200": {
1604
- "content": "<|reserved_special_token_195|>",
1605
- "lstrip": false,
1606
- "normalized": false,
1607
- "rstrip": false,
1608
- "single_word": false,
1609
- "special": true
1610
- },
1611
- "128201": {
1612
- "content": "<|reserved_special_token_196|>",
1613
- "lstrip": false,
1614
- "normalized": false,
1615
- "rstrip": false,
1616
- "single_word": false,
1617
- "special": true
1618
- },
1619
- "128202": {
1620
- "content": "<|reserved_special_token_197|>",
1621
- "lstrip": false,
1622
- "normalized": false,
1623
- "rstrip": false,
1624
- "single_word": false,
1625
- "special": true
1626
- },
1627
- "128203": {
1628
- "content": "<|reserved_special_token_198|>",
1629
- "lstrip": false,
1630
- "normalized": false,
1631
- "rstrip": false,
1632
- "single_word": false,
1633
- "special": true
1634
- },
1635
- "128204": {
1636
- "content": "<|reserved_special_token_199|>",
1637
- "lstrip": false,
1638
- "normalized": false,
1639
- "rstrip": false,
1640
- "single_word": false,
1641
- "special": true
1642
- },
1643
- "128205": {
1644
- "content": "<|reserved_special_token_200|>",
1645
- "lstrip": false,
1646
- "normalized": false,
1647
- "rstrip": false,
1648
- "single_word": false,
1649
- "special": true
1650
- },
1651
- "128206": {
1652
- "content": "<|reserved_special_token_201|>",
1653
- "lstrip": false,
1654
- "normalized": false,
1655
- "rstrip": false,
1656
- "single_word": false,
1657
- "special": true
1658
- },
1659
- "128207": {
1660
- "content": "<|reserved_special_token_202|>",
1661
- "lstrip": false,
1662
- "normalized": false,
1663
- "rstrip": false,
1664
- "single_word": false,
1665
- "special": true
1666
- },
1667
- "128208": {
1668
- "content": "<|reserved_special_token_203|>",
1669
- "lstrip": false,
1670
- "normalized": false,
1671
- "rstrip": false,
1672
- "single_word": false,
1673
- "special": true
1674
- },
1675
- "128209": {
1676
- "content": "<|reserved_special_token_204|>",
1677
- "lstrip": false,
1678
- "normalized": false,
1679
- "rstrip": false,
1680
- "single_word": false,
1681
- "special": true
1682
- },
1683
- "128210": {
1684
- "content": "<|reserved_special_token_205|>",
1685
- "lstrip": false,
1686
- "normalized": false,
1687
- "rstrip": false,
1688
- "single_word": false,
1689
- "special": true
1690
- },
1691
- "128211": {
1692
- "content": "<|reserved_special_token_206|>",
1693
- "lstrip": false,
1694
- "normalized": false,
1695
- "rstrip": false,
1696
- "single_word": false,
1697
- "special": true
1698
- },
1699
- "128212": {
1700
- "content": "<|reserved_special_token_207|>",
1701
- "lstrip": false,
1702
- "normalized": false,
1703
- "rstrip": false,
1704
- "single_word": false,
1705
- "special": true
1706
- },
1707
- "128213": {
1708
- "content": "<|reserved_special_token_208|>",
1709
- "lstrip": false,
1710
- "normalized": false,
1711
- "rstrip": false,
1712
- "single_word": false,
1713
- "special": true
1714
- },
1715
- "128214": {
1716
- "content": "<|reserved_special_token_209|>",
1717
- "lstrip": false,
1718
- "normalized": false,
1719
- "rstrip": false,
1720
- "single_word": false,
1721
- "special": true
1722
- },
1723
- "128215": {
1724
- "content": "<|reserved_special_token_210|>",
1725
- "lstrip": false,
1726
- "normalized": false,
1727
- "rstrip": false,
1728
- "single_word": false,
1729
- "special": true
1730
- },
1731
- "128216": {
1732
- "content": "<|reserved_special_token_211|>",
1733
- "lstrip": false,
1734
- "normalized": false,
1735
- "rstrip": false,
1736
- "single_word": false,
1737
- "special": true
1738
- },
1739
- "128217": {
1740
- "content": "<|reserved_special_token_212|>",
1741
- "lstrip": false,
1742
- "normalized": false,
1743
- "rstrip": false,
1744
- "single_word": false,
1745
- "special": true
1746
- },
1747
- "128218": {
1748
- "content": "<|reserved_special_token_213|>",
1749
- "lstrip": false,
1750
- "normalized": false,
1751
- "rstrip": false,
1752
- "single_word": false,
1753
- "special": true
1754
- },
1755
- "128219": {
1756
- "content": "<|reserved_special_token_214|>",
1757
- "lstrip": false,
1758
- "normalized": false,
1759
- "rstrip": false,
1760
- "single_word": false,
1761
- "special": true
1762
- },
1763
- "128220": {
1764
- "content": "<|reserved_special_token_215|>",
1765
- "lstrip": false,
1766
- "normalized": false,
1767
- "rstrip": false,
1768
- "single_word": false,
1769
- "special": true
1770
- },
1771
- "128221": {
1772
- "content": "<|reserved_special_token_216|>",
1773
- "lstrip": false,
1774
- "normalized": false,
1775
- "rstrip": false,
1776
- "single_word": false,
1777
- "special": true
1778
- },
1779
- "128222": {
1780
- "content": "<|reserved_special_token_217|>",
1781
- "lstrip": false,
1782
- "normalized": false,
1783
- "rstrip": false,
1784
- "single_word": false,
1785
- "special": true
1786
- },
1787
- "128223": {
1788
- "content": "<|reserved_special_token_218|>",
1789
- "lstrip": false,
1790
- "normalized": false,
1791
- "rstrip": false,
1792
- "single_word": false,
1793
- "special": true
1794
- },
1795
- "128224": {
1796
- "content": "<|reserved_special_token_219|>",
1797
- "lstrip": false,
1798
- "normalized": false,
1799
- "rstrip": false,
1800
- "single_word": false,
1801
- "special": true
1802
- },
1803
- "128225": {
1804
- "content": "<|reserved_special_token_220|>",
1805
- "lstrip": false,
1806
- "normalized": false,
1807
- "rstrip": false,
1808
- "single_word": false,
1809
- "special": true
1810
- },
1811
- "128226": {
1812
- "content": "<|reserved_special_token_221|>",
1813
- "lstrip": false,
1814
- "normalized": false,
1815
- "rstrip": false,
1816
- "single_word": false,
1817
- "special": true
1818
- },
1819
- "128227": {
1820
- "content": "<|reserved_special_token_222|>",
1821
- "lstrip": false,
1822
- "normalized": false,
1823
- "rstrip": false,
1824
- "single_word": false,
1825
- "special": true
1826
- },
1827
- "128228": {
1828
- "content": "<|reserved_special_token_223|>",
1829
- "lstrip": false,
1830
- "normalized": false,
1831
- "rstrip": false,
1832
- "single_word": false,
1833
- "special": true
1834
- },
1835
- "128229": {
1836
- "content": "<|reserved_special_token_224|>",
1837
- "lstrip": false,
1838
- "normalized": false,
1839
- "rstrip": false,
1840
- "single_word": false,
1841
- "special": true
1842
- },
1843
- "128230": {
1844
- "content": "<|reserved_special_token_225|>",
1845
- "lstrip": false,
1846
- "normalized": false,
1847
- "rstrip": false,
1848
- "single_word": false,
1849
- "special": true
1850
- },
1851
- "128231": {
1852
- "content": "<|reserved_special_token_226|>",
1853
- "lstrip": false,
1854
- "normalized": false,
1855
- "rstrip": false,
1856
- "single_word": false,
1857
- "special": true
1858
- },
1859
- "128232": {
1860
- "content": "<|reserved_special_token_227|>",
1861
- "lstrip": false,
1862
- "normalized": false,
1863
- "rstrip": false,
1864
- "single_word": false,
1865
- "special": true
1866
- },
1867
- "128233": {
1868
- "content": "<|reserved_special_token_228|>",
1869
- "lstrip": false,
1870
- "normalized": false,
1871
- "rstrip": false,
1872
- "single_word": false,
1873
- "special": true
1874
- },
1875
- "128234": {
1876
- "content": "<|reserved_special_token_229|>",
1877
- "lstrip": false,
1878
- "normalized": false,
1879
- "rstrip": false,
1880
- "single_word": false,
1881
- "special": true
1882
- },
1883
- "128235": {
1884
- "content": "<|reserved_special_token_230|>",
1885
- "lstrip": false,
1886
- "normalized": false,
1887
- "rstrip": false,
1888
- "single_word": false,
1889
- "special": true
1890
- },
1891
- "128236": {
1892
- "content": "<|reserved_special_token_231|>",
1893
- "lstrip": false,
1894
- "normalized": false,
1895
- "rstrip": false,
1896
- "single_word": false,
1897
- "special": true
1898
- },
1899
- "128237": {
1900
- "content": "<|reserved_special_token_232|>",
1901
- "lstrip": false,
1902
- "normalized": false,
1903
- "rstrip": false,
1904
- "single_word": false,
1905
- "special": true
1906
- },
1907
- "128238": {
1908
- "content": "<|reserved_special_token_233|>",
1909
- "lstrip": false,
1910
- "normalized": false,
1911
- "rstrip": false,
1912
- "single_word": false,
1913
- "special": true
1914
- },
1915
- "128239": {
1916
- "content": "<|reserved_special_token_234|>",
1917
- "lstrip": false,
1918
- "normalized": false,
1919
- "rstrip": false,
1920
- "single_word": false,
1921
- "special": true
1922
- },
1923
- "128240": {
1924
- "content": "<|reserved_special_token_235|>",
1925
- "lstrip": false,
1926
- "normalized": false,
1927
- "rstrip": false,
1928
- "single_word": false,
1929
- "special": true
1930
- },
1931
- "128241": {
1932
- "content": "<|reserved_special_token_236|>",
1933
- "lstrip": false,
1934
- "normalized": false,
1935
- "rstrip": false,
1936
- "single_word": false,
1937
- "special": true
1938
- },
1939
- "128242": {
1940
- "content": "<|reserved_special_token_237|>",
1941
- "lstrip": false,
1942
- "normalized": false,
1943
- "rstrip": false,
1944
- "single_word": false,
1945
- "special": true
1946
- },
1947
- "128243": {
1948
- "content": "<|reserved_special_token_238|>",
1949
- "lstrip": false,
1950
- "normalized": false,
1951
- "rstrip": false,
1952
- "single_word": false,
1953
- "special": true
1954
- },
1955
- "128244": {
1956
- "content": "<|reserved_special_token_239|>",
1957
- "lstrip": false,
1958
- "normalized": false,
1959
- "rstrip": false,
1960
- "single_word": false,
1961
- "special": true
1962
- },
1963
- "128245": {
1964
- "content": "<|reserved_special_token_240|>",
1965
- "lstrip": false,
1966
- "normalized": false,
1967
- "rstrip": false,
1968
- "single_word": false,
1969
- "special": true
1970
- },
1971
- "128246": {
1972
- "content": "<|reserved_special_token_241|>",
1973
- "lstrip": false,
1974
- "normalized": false,
1975
- "rstrip": false,
1976
- "single_word": false,
1977
- "special": true
1978
- },
1979
- "128247": {
1980
- "content": "<|reserved_special_token_242|>",
1981
- "lstrip": false,
1982
- "normalized": false,
1983
- "rstrip": false,
1984
- "single_word": false,
1985
- "special": true
1986
- },
1987
- "128248": {
1988
- "content": "<|reserved_special_token_243|>",
1989
- "lstrip": false,
1990
- "normalized": false,
1991
- "rstrip": false,
1992
- "single_word": false,
1993
- "special": true
1994
- },
1995
- "128249": {
1996
- "content": "<|reserved_special_token_244|>",
1997
- "lstrip": false,
1998
- "normalized": false,
1999
- "rstrip": false,
2000
- "single_word": false,
2001
- "special": true
2002
- },
2003
- "128250": {
2004
- "content": "<|reserved_special_token_245|>",
2005
- "lstrip": false,
2006
- "normalized": false,
2007
- "rstrip": false,
2008
- "single_word": false,
2009
- "special": true
2010
- },
2011
- "128251": {
2012
- "content": "<|reserved_special_token_246|>",
2013
- "lstrip": false,
2014
- "normalized": false,
2015
- "rstrip": false,
2016
- "single_word": false,
2017
- "special": true
2018
- },
2019
- "128252": {
2020
- "content": "<|reserved_special_token_247|>",
2021
- "lstrip": false,
2022
- "normalized": false,
2023
- "rstrip": false,
2024
- "single_word": false,
2025
- "special": true
2026
- },
2027
- "128253": {
2028
- "content": "<|reserved_special_token_248|>",
2029
- "lstrip": false,
2030
- "normalized": false,
2031
- "rstrip": false,
2032
- "single_word": false,
2033
- "special": true
2034
- },
2035
- "128254": {
2036
- "content": "<|reserved_special_token_249|>",
2037
- "lstrip": false,
2038
- "normalized": false,
2039
- "rstrip": false,
2040
- "single_word": false,
2041
- "special": true
2042
- },
2043
- "128255": {
2044
- "content": "<|reserved_special_token_250|>",
2045
  "lstrip": false,
2046
  "normalized": false,
2047
  "rstrip": false,
@@ -2049,15 +25,22 @@
2049
  "special": true
2050
  }
2051
  },
2052
- "bos_token": "<|begin_of_text|>",
2053
- "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
2054
- "clean_up_tokenization_spaces": true,
2055
- "eos_token": "<|end_of_text|>",
2056
- "model_input_names": [
2057
- "input_ids",
2058
- "attention_mask"
2059
  ],
 
 
 
 
 
2060
  "model_max_length": 2048,
2061
- "pad_token": "<|end_of_text|>",
2062
- "tokenizer_class": "PreTrainedTokenizerFast"
 
 
 
 
 
2063
  }
 
1
  {
2
  "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "<unk>",
5
  "lstrip": false,
6
  "normalized": false,
7
  "rstrip": false,
8
  "single_word": false,
9
  "special": true
10
  },
11
+ "1": {
12
+ "content": "<s>",
13
  "lstrip": false,
14
  "normalized": false,
15
  "rstrip": false,
16
  "single_word": false,
17
  "special": true
18
  },
19
+ "2": {
20
+ "content": "</s>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "special": true
  }
  },
+ "additional_special_tokens": [
+ "<unk>",
+ "<s>",
+ "</s>"
  ],
+ "bos_token": "<s>",
+ "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": true,
  "model_max_length": 2048,
+ "pad_token": "</s>",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "truncation_side": "left",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": true
  }
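
For reference, the `chat_template` added above is a Zephyr-style Jinja template. Below is a minimal sketch of how it renders a conversation, assuming the `transformers` library's `apply_chat_template` API; the repo id is illustrative and not part of this commit.

```python
# Hypothetical usage sketch -- not part of this commit.
from transformers import AutoTokenizer

# Illustrative repo id; any tokenizer carrying the template above behaves the same.
tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/mistral-7b-sft-beta")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is DPO?"},
]

# add_generation_prompt=True makes the template append the trailing
# '<|assistant|>' header so generation continues as the assistant turn.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape (roughly):
# <|system|>
# You are a helpful assistant.</s>
# <|user|>
# What is DPO?</s>
# <|assistant|>
```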
train_results.json CHANGED
@@ -1,8 +1,8 @@
  {
  "epoch": 1.0,
- "train_loss": 0.0564979040210352,
- "train_runtime": 4410.0999,
- "train_samples": 61134,
- "train_samples_per_second": 13.862,
- "train_steps_per_second": 0.108
  }

  {
  "epoch": 1.0,
+ "train_loss": 0.08422429972704783,
+ "train_runtime": 6988.5729,
+ "train_samples": 113028,
+ "train_samples_per_second": 16.173,
+ "train_steps_per_second": 0.126
  }
trainer_state.json CHANGED
@@ -1,21 +1,21 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 0.9984301412872841,
- "eval_steps": 100,
- "global_step": 477,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
  {
  "epoch": 0.0,
- "learning_rate": 1.0416666666666666e-08,
- "logits/chosen": 0.01849743165075779,
- "logits/rejected": 0.013860300183296204,
- "logps/chosen": -318.92303466796875,
- "logps/rejected": -327.4117126464844,
- "loss": 0.0872,
  "rewards/accuracies": 0.0,
  "rewards/chosen": 0.0,
  "rewards/margins": 0.0,
@@ -23,739 +23,1249 @@
  "step": 1
  },
  {
- "epoch": 0.02,
- "learning_rate": 1.0416666666666667e-07,
- "logits/chosen": 0.0165844839066267,
- "logits/rejected": 0.029045505449175835,
- "logps/chosen": -380.119384765625,
- "logps/rejected": -372.70452880859375,
- "loss": 0.0916,
- "rewards/accuracies": 0.4930555522441864,
- "rewards/chosen": 0.00031676876824349165,
- "rewards/margins": 0.0008045767317526042,
- "rewards/rejected": -0.00048780813813209534,
  "step": 10
  },
  {
- "epoch": 0.04,
- "learning_rate": 2.0833333333333333e-07,
- "logits/chosen": -0.01443287543952465,
- "logits/rejected": 0.01765434443950653,
- "logps/chosen": -396.4976501464844,
- "logps/rejected": -366.0671691894531,
- "loss": 0.0929,
- "rewards/accuracies": 0.581250011920929,
- "rewards/chosen": 0.000257034320384264,
- "rewards/margins": 0.0013006285298615694,
- "rewards/rejected": -0.0010435942094773054,
  "step": 20
  },
  {
- "epoch": 0.06,
- "learning_rate": 3.1249999999999997e-07,
- "logits/chosen": 0.037671297788619995,
- "logits/rejected": 0.06698160618543625,
- "logps/chosen": -374.0677795410156,
- "logps/rejected": -360.3742370605469,
- "loss": 0.0849,
- "rewards/accuracies": 0.637499988079071,
- "rewards/chosen": -0.0024321433156728745,
- "rewards/margins": 0.003862987505272031,
- "rewards/rejected": -0.006295130588114262,
  "step": 30
  },
  {
- "epoch": 0.08,
- "learning_rate": 4.1666666666666667e-07,
- "logits/chosen": -0.016021814197301865,
- "logits/rejected": 0.040130265057086945,
- "logps/chosen": -384.62115478515625,
- "logps/rejected": -369.37591552734375,
- "loss": 0.0899,
- "rewards/accuracies": 0.6499999761581421,
- "rewards/chosen": -0.005014514084905386,
- "rewards/margins": 0.00654798885807395,
- "rewards/rejected": -0.01156250387430191,
  "step": 40
  },
  {
- "epoch": 0.1,
- "learning_rate": 4.999731868769026e-07,
- "logits/chosen": 0.021576542407274246,
- "logits/rejected": 0.04092331975698471,
- "logps/chosen": -395.0044860839844,
- "logps/rejected": -385.6026306152344,
- "loss": 0.0905,
- "rewards/accuracies": 0.65625,
- "rewards/chosen": -0.011928597465157509,
- "rewards/margins": 0.01728428527712822,
- "rewards/rejected": -0.02921288087964058,
  "step": 50
  },
  {
- "epoch": 0.13,
- "learning_rate": 4.990353313429303e-07,
- "logits/chosen": 0.09396852552890778,
- "logits/rejected": 0.177364319562912,
- "logps/chosen": -373.46978759765625,
- "logps/rejected": -350.2561950683594,
- "loss": 0.0896,
- "rewards/accuracies": 0.675000011920929,
- "rewards/chosen": -0.022122707217931747,
- "rewards/margins": 0.04510267823934555,
- "rewards/rejected": -0.067225381731987,
  "step": 60
  },
  {
- "epoch": 0.15,
- "learning_rate": 4.967625656594781e-07,
- "logits/chosen": 0.09231746941804886,
- "logits/rejected": 0.10504136979579926,
- "logps/chosen": -380.4566955566406,
- "logps/rejected": -384.76495361328125,
- "loss": 0.0895,
- "rewards/accuracies": 0.675000011920929,
- "rewards/chosen": -0.020214151591062546,
- "rewards/margins": 0.044125162065029144,
- "rewards/rejected": -0.06433931738138199,
  "step": 70
  },
  {
- "epoch": 0.17,
- "learning_rate": 4.93167072587771e-07,
- "logits/chosen": 0.1812177449464798,
- "logits/rejected": 0.2344866693019867,
- "logps/chosen": -373.54779052734375,
- "logps/rejected": -344.9815673828125,
- "loss": 0.0887,
- "rewards/accuracies": 0.706250011920929,
- "rewards/chosen": -0.003499386366456747,
- "rewards/margins": 0.11121924966573715,
- "rewards/rejected": -0.11471863090991974,
  "step": 80
  },
  {
- "epoch": 0.19,
- "learning_rate": 4.882681251368548e-07,
- "logits/chosen": 0.23078179359436035,
- "logits/rejected": 0.3160688281059265,
- "logps/chosen": -398.22735595703125,
- "logps/rejected": -354.7359619140625,
- "loss": 0.0854,
- "rewards/accuracies": 0.6875,
- "rewards/chosen": -0.03982505947351456,
- "rewards/margins": 0.12135788053274155,
- "rewards/rejected": -0.1611829400062561,
  "step": 90
  },
  {
- "epoch": 0.21,
- "learning_rate": 4.820919832540181e-07,
- "logits/chosen": 0.33522385358810425,
- "logits/rejected": 0.34693339467048645,
- "logps/chosen": -373.6068115234375,
- "logps/rejected": -393.63311767578125,
- "loss": 0.09,
  "rewards/accuracies": 0.7250000238418579,
- "rewards/chosen": -0.05229802802205086,
- "rewards/margins": 0.1304590255022049,
- "rewards/rejected": -0.18275703489780426,
  "step": 100
  },
  {
- "epoch": 0.21,
- "eval_logits/chosen": 0.49261584877967834,
- "eval_logits/rejected": 0.5302599668502808,
- "eval_logps/chosen": -392.5748291015625,
- "eval_logps/rejected": -418.8423767089844,
- "eval_loss": 0.08443526923656464,
- "eval_rewards/accuracies": 0.69921875,
- "eval_rewards/chosen": -0.09445539116859436,
- "eval_rewards/margins": 0.20123936235904694,
- "eval_rewards/rejected": -0.2956947684288025,
- "eval_runtime": 75.5045,
- "eval_samples_per_second": 26.488,
- "eval_steps_per_second": 0.424,
- "step": 100
  },
  {
- "epoch": 0.23,
- "learning_rate": 4.7467175306295647e-07,
- "logits/chosen": 0.5233359336853027,
- "logits/rejected": 0.5924205780029297,
- "logps/chosen": -409.8135681152344,
- "logps/rejected": -400.6418151855469,
- "loss": 0.0775,
  "rewards/accuracies": 0.699999988079071,
- "rewards/chosen": -0.17791931331157684,
- "rewards/margins": 0.2254853993654251,
- "rewards/rejected": -0.40340471267700195,
- "step": 110
  },
  {
  "epoch": 0.25,
- "learning_rate": 4.6604720940421207e-07,
- "logits/chosen": 0.6610409021377563,
- "logits/rejected": 0.8009072542190552,
- "logps/chosen": -459.3719787597656,
- "logps/rejected": -480.128662109375,
- "loss": 0.0697,
  "rewards/accuracies": 0.762499988079071,
- "rewards/chosen": -0.2562519609928131,
- "rewards/margins": 0.2973101735115051,
- "rewards/rejected": -0.5535621643066406,
- "step": 120
  },
  {
  "epoch": 0.27,
- "learning_rate": 4.5626458262912735e-07,
- "logits/chosen": 0.8142817616462708,
- "logits/rejected": 1.0136159658432007,
- "logps/chosen": -453.57037353515625,
- "logps/rejected": -438.6094665527344,
- "loss": 0.0557,
- "rewards/accuracies": 0.65625,
- "rewards/chosen": -0.45035696029663086,
- "rewards/margins": 0.2075636386871338,
- "rewards/rejected": -0.6579206585884094,
- "step": 130
  },
  {
  "epoch": 0.29,
- "learning_rate": 4.453763107901675e-07,
- "logits/chosen": 0.9267638325691223,
- "logits/rejected": 0.9543718099594116,
- "logps/chosen": -426.4134826660156,
- "logps/rejected": -436.49261474609375,
- "loss": 0.06,
- "rewards/accuracies": 0.7562500238418579,
- "rewards/chosen": -0.33891427516937256,
- "rewards/margins": 0.302972674369812,
- "rewards/rejected": -0.6418868899345398,
- "step": 140
  },
  {
  "epoch": 0.31,
- "learning_rate": 4.3344075855595097e-07,
- "logits/chosen": 0.834929347038269,
- "logits/rejected": 1.0096248388290405,
- "logps/chosen": -383.9637756347656,
- "logps/rejected": -392.84912109375,
- "loss": 0.0588,
- "rewards/accuracies": 0.706250011920929,
- "rewards/chosen": -0.2390960454940796,
- "rewards/margins": 0.35297515988349915,
- "rewards/rejected": -0.5920711755752563,
- "step": 150
  },
  {
  "epoch": 0.33,
- "learning_rate": 4.2052190435769554e-07,
- "logits/chosen": 1.0894076824188232,
- "logits/rejected": 1.2157137393951416,
- "logps/chosen": -429.09857177734375,
- "logps/rejected": -461.9745178222656,
- "loss": 0.0509,
- "rewards/accuracies": 0.71875,
- "rewards/chosen": -0.5552287101745605,
- "rewards/margins": 0.3786623775959015,
- "rewards/rejected": -0.9338911175727844,
- "step": 160
  },
  {
  "epoch": 0.36,
- "learning_rate": 4.0668899744407567e-07,
- "logits/chosen": 0.9078506231307983,
- "logits/rejected": 1.0372017621994019,
- "logps/chosen": -482.3373107910156,
- "logps/rejected": -479.88916015625,
- "loss": 0.0479,
- "rewards/accuracies": 0.6625000238418579,
- "rewards/chosen": -0.505352795124054,
- "rewards/margins": 0.26132458448410034,
- "rewards/rejected": -0.7666773796081543,
- "step": 170
  },
  {
  "epoch": 0.38,
- "learning_rate": 3.920161866827889e-07,
- "logits/chosen": 0.80833500623703,
- "logits/rejected": 0.8488121032714844,
- "logps/chosen": -413.3409118652344,
- "logps/rejected": -438.3705139160156,
- "loss": 0.0476,
- "rewards/accuracies": 0.6875,
- "rewards/chosen": -0.4049296975135803,
- "rewards/margins": 0.3900560736656189,
- "rewards/rejected": -0.7949857115745544,
- "step": 180
  },
  {
  "epoch": 0.4,
- "learning_rate": 3.765821230985757e-07,
- "logits/chosen": 0.9091412425041199,
- "logits/rejected": 1.0051593780517578,
- "logps/chosen": -395.74383544921875,
- "logps/rejected": -402.8367919921875,
- "loss": 0.0478,
- "rewards/accuracies": 0.7124999761581421,
- "rewards/chosen": -0.4961649775505066,
- "rewards/margins": 0.3637959361076355,
- "rewards/rejected": -0.8599609136581421,
- "step": 190
  },
  {
- "epoch": 0.42,
- "learning_rate": 3.604695382782159e-07,
- "logits/chosen": 1.0421111583709717,
- "logits/rejected": 1.1686071157455444,
- "logps/chosen": -422.24224853515625,
- "logps/rejected": -469.1251525878906,
- "loss": 0.0405,
- "rewards/accuracies": 0.7437499761581421,
- "rewards/chosen": -0.7005800008773804,
- "rewards/margins": 0.46449971199035645,
- "rewards/rejected": -1.1650797128677368,
- "step": 200
  },
  {
  "epoch": 0.42,
- "eval_logits/chosen": 1.1859312057495117,
- "eval_logits/rejected": 1.2733540534973145,
- "eval_logps/chosen": -449.3788757324219,
- "eval_logps/rejected": -505.84661865234375,
- "eval_loss": 0.045209601521492004,
- "eval_rewards/accuracies": 0.75390625,
- "eval_rewards/chosen": -0.6624964475631714,
- "eval_rewards/margins": 0.5032405257225037,
- "eval_rewards/rejected": -1.1657369136810303,
- "eval_runtime": 75.0855,
- "eval_samples_per_second": 26.636,
- "eval_steps_per_second": 0.426,
- "step": 200
  },
  {
- "epoch": 0.44,
- "learning_rate": 3.4376480090239047e-07,
- "logits/chosen": 0.9289053082466125,
- "logits/rejected": 1.0322377681732178,
- "logps/chosen": -454.09521484375,
- "logps/rejected": -484.48956298828125,
- "loss": 0.0428,
  "rewards/accuracies": 0.7250000238418579,
- "rewards/chosen": -0.5330354571342468,
- "rewards/margins": 0.47441625595092773,
- "rewards/rejected": -1.0074517726898193,
- "step": 210
  },
  {
  "epoch": 0.46,
- "learning_rate": 3.265574537815398e-07,
- "logits/chosen": 0.6325788497924805,
- "logits/rejected": 0.8454742431640625,
- "logps/chosen": -443.6888732910156,
- "logps/rejected": -444.2510681152344,
- "loss": 0.051,
- "rewards/accuracies": 0.7250000238418579,
- "rewards/chosen": -0.4299241006374359,
- "rewards/margins": 0.41193485260009766,
- "rewards/rejected": -0.8418590426445007,
- "step": 220
  },
  {
  "epoch": 0.48,
- "learning_rate": 3.0893973387735683e-07,
- "logits/chosen": 0.8997888565063477,
- "logits/rejected": 0.9853512048721313,
- "logps/chosen": -413.89520263671875,
- "logps/rejected": -458.99676513671875,
- "loss": 0.0525,
- "rewards/accuracies": 0.731249988079071,
- "rewards/chosen": -0.5308324694633484,
- "rewards/margins": 0.4597201943397522,
- "rewards/rejected": -0.9905527830123901,
- "step": 230
  },
  {
  "epoch": 0.5,
- "learning_rate": 2.910060778827554e-07,
- "logits/chosen": 1.0547417402267456,
- "logits/rejected": 1.1306800842285156,
- "logps/chosen": -493.91790771484375,
- "logps/rejected": -539.1799926757812,
- "loss": 0.0471,
- "rewards/accuracies": 0.7124999761581421,
- "rewards/chosen": -0.6113244295120239,
- "rewards/margins": 0.5182110667228699,
- "rewards/rejected": -1.1295355558395386,
- "step": 240
  },
  {
  "epoch": 0.52,
- "learning_rate": 2.7285261601056697e-07,
- "logits/chosen": 1.2281643152236938,
- "logits/rejected": 1.359076976776123,
- "logps/chosen": -466.77001953125,
- "logps/rejected": -483.91259765625,
- "loss": 0.0419,
- "rewards/accuracies": 0.6625000238418579,
- "rewards/chosen": -0.7258759140968323,
- "rewards/margins": 0.42711353302001953,
- "rewards/rejected": -1.152989387512207,
- "step": 250
  },
  {
  "epoch": 0.54,
- "learning_rate": 2.5457665670441937e-07,
- "logits/chosen": 1.2255347967147827,
- "logits/rejected": 1.462003469467163,
- "logps/chosen": -491.76190185546875,
- "logps/rejected": -505.47161865234375,
- "loss": 0.0451,
- "rewards/accuracies": 0.7124999761581421,
- "rewards/chosen": -0.7059242725372314,
- "rewards/margins": 0.6359472274780273,
- "rewards/rejected": -1.3418715000152588,
- "step": 260
  },
  {
  "epoch": 0.57,
- "learning_rate": 2.3627616503391812e-07,
- "logits/chosen": 1.3674428462982178,
- "logits/rejected": 1.578064203262329,
- "logps/chosen": -486.397216796875,
- "logps/rejected": -492.1827087402344,
- "loss": 0.0472,
- "rewards/accuracies": 0.6937500238418579,
- "rewards/chosen": -0.7773429155349731,
- "rewards/margins": 0.38945746421813965,
- "rewards/rejected": -1.1668003797531128,
- "step": 270
  },
  {
  "epoch": 0.59,
- "learning_rate": 2.1804923757009882e-07,
- "logits/chosen": 1.366081953048706,
- "logits/rejected": 1.5207383632659912,
- "logps/chosen": -477.0743103027344,
- "logps/rejected": -530.8953857421875,
- "loss": 0.0445,
- "rewards/accuracies": 0.675000011920929,
- "rewards/chosen": -0.8941423296928406,
- "rewards/margins": 0.4790104925632477,
- "rewards/rejected": -1.3731528520584106,
- "step": 280
  },
  {
  "epoch": 0.61,
- "learning_rate": 1.9999357655598891e-07,
- "logits/chosen": 1.2689809799194336,
- "logits/rejected": 1.4011085033416748,
- "logps/chosen": -438.982421875,
- "logps/rejected": -469.45703125,
- "loss": 0.0464,
- "rewards/accuracies": 0.7124999761581421,
- "rewards/chosen": -0.62468022108078,
- "rewards/margins": 0.513271689414978,
- "rewards/rejected": -1.1379519701004028,
- "step": 290
  },
  {
- "epoch": 0.63,
- "learning_rate": 1.8220596619089573e-07,
- "logits/chosen": 1.1505718231201172,
- "logits/rejected": 1.4240622520446777,
- "logps/chosen": -458.03631591796875,
- "logps/rejected": -443.11712646484375,
- "loss": 0.0479,
- "rewards/accuracies": 0.6625000238418579,
- "rewards/chosen": -0.6710134148597717,
- "rewards/margins": 0.39567166566848755,
- "rewards/rejected": -1.0666849613189697,
- "step": 300
  },
  {
  "epoch": 0.63,
- "eval_logits/chosen": 1.2982094287872314,
- "eval_logits/rejected": 1.409311056137085,
- "eval_logps/chosen": -435.2132568359375,
- "eval_logps/rejected": -501.30841064453125,
- "eval_loss": 0.047696553170681,
- "eval_rewards/accuracies": 0.73828125,
- "eval_rewards/chosen": -0.5208398699760437,
- "eval_rewards/margins": 0.5995149612426758,
- "eval_rewards/rejected": -1.1203548908233643,
- "eval_runtime": 75.296,
- "eval_samples_per_second": 26.562,
- "eval_steps_per_second": 0.425,
- "step": 300
  },
  {
  "epoch": 0.65,
- "learning_rate": 1.647817538357072e-07,
- "logits/chosen": 1.2780801057815552,
- "logits/rejected": 1.3399560451507568,
- "logps/chosen": -475.42413330078125,
- "logps/rejected": -517.4520263671875,
- "loss": 0.0478,
- "rewards/accuracies": 0.6499999761581421,
- "rewards/chosen": -0.7529923319816589,
- "rewards/margins": 0.4368392825126648,
- "rewards/rejected": -1.1898316144943237,
- "step": 310
  },
  {
  "epoch": 0.67,
- "learning_rate": 1.478143389201113e-07,
- "logits/chosen": 1.198677897453308,
- "logits/rejected": 1.4085700511932373,
- "logps/chosen": -498.35711669921875,
- "logps/rejected": -497.4380798339844,
- "loss": 0.0424,
- "rewards/accuracies": 0.65625,
- "rewards/chosen": -0.6401562690734863,
- "rewards/margins": 0.48012202978134155,
- "rewards/rejected": -1.1202783584594727,
- "step": 320
  },
  {
  "epoch": 0.69,
- "learning_rate": 1.3139467229135998e-07,
- "logits/chosen": 1.2183105945587158,
- "logits/rejected": 1.2747819423675537,
- "logps/chosen": -442.5284118652344,
- "logps/rejected": -533.216796875,
- "loss": 0.0454,
- "rewards/accuracies": 0.737500011920929,
- "rewards/chosen": -0.5910875201225281,
- "rewards/margins": 0.5799761414527893,
- "rewards/rejected": -1.1710636615753174,
- "step": 330
  },
  {
  "epoch": 0.71,
- "learning_rate": 1.1561076868822755e-07,
- "logits/chosen": 1.203604817390442,
- "logits/rejected": 1.1832085847854614,
- "logps/chosen": -441.4521484375,
- "logps/rejected": -512.8982543945312,
- "loss": 0.0428,
- "rewards/accuracies": 0.699999988079071,
- "rewards/chosen": -0.7571262121200562,
- "rewards/margins": 0.4640630781650543,
- "rewards/rejected": -1.221189260482788,
- "step": 340
  },
  {
- "epoch": 0.73,
- "learning_rate": 1.0054723495346482e-07,
- "logits/chosen": 1.3052194118499756,
- "logits/rejected": 1.382683515548706,
- "logps/chosen": -465.3661193847656,
- "logps/rejected": -528.7847290039062,
- "loss": 0.0412,
- "rewards/accuracies": 0.737500011920929,
- "rewards/chosen": -0.7757940292358398,
- "rewards/margins": 0.4744884967803955,
- "rewards/rejected": -1.2502825260162354,
- "step": 350
  },
  {
  "epoch": 0.75,
- "learning_rate": 8.628481651367875e-08,
- "logits/chosen": 1.1976938247680664,
- "logits/rejected": 1.432969331741333,
- "logps/chosen": -491.15771484375,
- "logps/rejected": -515.0520629882812,
- "loss": 0.0446,
- "rewards/accuracies": 0.706250011920929,
- "rewards/chosen": -0.6491819620132446,
- "rewards/margins": 0.6244359612464905,
- "rewards/rejected": -1.2736178636550903,
- "step": 360
  },
  {
  "epoch": 0.77,
- "learning_rate": 7.289996455765748e-08,
- "logits/chosen": 1.192779541015625,
- "logits/rejected": 1.324210524559021,
- "logps/chosen": -504.5486755371094,
- "logps/rejected": -508.7030334472656,
- "loss": 0.0435,
- "rewards/accuracies": 0.699999988079071,
- "rewards/chosen": -0.7155844569206238,
- "rewards/margins": 0.5151349306106567,
- "rewards/rejected": -1.2307194471359253,
- "step": 370
  },
  {
  "epoch": 0.8,
- "learning_rate": 6.046442623320145e-08,
- "logits/chosen": 1.223356008529663,
- "logits/rejected": 1.4434764385223389,
- "logps/chosen": -474.7169494628906,
- "logps/rejected": -518.0782470703125,
- "loss": 0.0476,
- "rewards/accuracies": 0.737500011920929,
- "rewards/chosen": -0.6729675531387329,
- "rewards/margins": 0.6387326717376709,
- "rewards/rejected": -1.3117002248764038,
- "step": 380
  },
  {
- "epoch": 0.82,
- "learning_rate": 4.904486005914027e-08,
- "logits/chosen": 1.3060978651046753,
- "logits/rejected": 1.4896109104156494,
- "logps/chosen": -470.46661376953125,
- "logps/rejected": -502.4981384277344,
- "loss": 0.0482,
  "rewards/accuracies": 0.7437499761581421,
- "rewards/chosen": -0.6783354878425598,
- "rewards/margins": 0.5045996904373169,
- "rewards/rejected": -1.1829349994659424,
- "step": 390
  },
  {
  "epoch": 0.84,
- "learning_rate": 3.8702478614051345e-08,
- "logits/chosen": 1.3413165807724,
- "logits/rejected": 1.4800562858581543,
- "logps/chosen": -450.84844970703125,
- "logps/rejected": -509.7266540527344,
- "loss": 0.0457,
- "rewards/accuracies": 0.65625,
- "rewards/chosen": -0.7674819231033325,
- "rewards/margins": 0.4173991084098816,
- "rewards/rejected": -1.1848809719085693,
- "step": 400
  },
  {
- "epoch": 0.84,
- "eval_logits/chosen": 1.491492748260498,
- "eval_logits/rejected": 1.6154029369354248,
- "eval_logps/chosen": -448.419677734375,
- "eval_logps/rejected": -518.3443603515625,
- "eval_loss": 0.044891636818647385,
- "eval_rewards/accuracies": 0.73828125,
- "eval_rewards/chosen": -0.6529037952423096,
- "eval_rewards/margins": 0.6378109455108643,
- "eval_rewards/rejected": -1.2907147407531738,
- "eval_runtime": 74.6873,
- "eval_samples_per_second": 26.778,
- "eval_steps_per_second": 0.428,
- "step": 400
  },
  {
  "epoch": 0.86,
- "learning_rate": 2.9492720416985e-08,
- "logits/chosen": 1.3658090829849243,
- "logits/rejected": 1.523946762084961,
- "logps/chosen": -461.0426330566406,
- "logps/rejected": -491.6429138183594,
- "loss": 0.045,
- "rewards/accuracies": 0.706250011920929,
- "rewards/chosen": -0.6857269406318665,
- "rewards/margins": 0.5137700438499451,
- "rewards/rejected": -1.1994969844818115,
- "step": 410
  },
  {
  "epoch": 0.88,
- "learning_rate": 2.1464952759020856e-08,
- "logits/chosen": 1.3796783685684204,
- "logits/rejected": 1.5178402662277222,
- "logps/chosen": -454.60455322265625,
- "logps/rejected": -483.65704345703125,
- "loss": 0.0418,
- "rewards/accuracies": 0.768750011920929,
- "rewards/chosen": -0.6705530285835266,
- "rewards/margins": 0.604373574256897,
- "rewards/rejected": -1.2749265432357788,
- "step": 420
  },
  {
- "epoch": 0.9,
- "learning_rate": 1.4662207078575684e-08,
- "logits/chosen": 1.334680199623108,
- "logits/rejected": 1.4741976261138916,
- "logps/chosen": -504.280029296875,
- "logps/rejected": -529.8871459960938,
- "loss": 0.0453,
- "rewards/accuracies": 0.699999988079071,
- "rewards/chosen": -0.7305961847305298,
- "rewards/margins": 0.5881385207176208,
- "rewards/rejected": -1.3187347650527954,
- "step": 430
  },
  {
  "epoch": 0.92,
- "learning_rate": 9.12094829893642e-09,
- "logits/chosen": 1.3827157020568848,
- "logits/rejected": 1.5478546619415283,
- "logps/chosen": -453.01171875,
- "logps/rejected": -480.3030700683594,
- "loss": 0.0414,
- "rewards/accuracies": 0.699999988079071,
- "rewards/chosen": -0.7755357027053833,
- "rewards/margins": 0.5378071069717407,
- "rewards/rejected": -1.313342809677124,
- "step": 440
  },
  {
  "epoch": 0.94,
- "learning_rate": 4.8708793644441086e-09,
- "logits/chosen": 1.2280631065368652,
- "logits/rejected": 1.454526662826538,
- "logps/chosen": -487.4305114746094,
- "logps/rejected": -500.71087646484375,
- "loss": 0.0425,
- "rewards/accuracies": 0.65625,
- "rewards/chosen": -0.6377500295639038,
- "rewards/margins": 0.5590785145759583,
- "rewards/rejected": -1.1968284845352173,
- "step": 450
  },
  {
  "epoch": 0.96,
- "learning_rate": 1.9347820230782295e-09,
- "logits/chosen": 1.336721658706665,
- "logits/rejected": 1.4986612796783447,
- "logps/chosen": -455.5997619628906,
- "logps/rejected": -474.46038818359375,
- "loss": 0.0425,
- "rewards/accuracies": 0.7124999761581421,
- "rewards/chosen": -0.7689257264137268,
- "rewards/margins": 0.4747004508972168,
- "rewards/rejected": -1.243626356124878,
- "step": 460
  },
  {
  "epoch": 0.98,
- "learning_rate": 3.2839470889836627e-10,
- "logits/chosen": 1.2109500169754028,
- "logits/rejected": 1.3351854085922241,
- "logps/chosen": -490.6439514160156,
- "logps/rejected": -541.4273681640625,
- "loss": 0.0421,
- "rewards/accuracies": 0.75,
- "rewards/chosen": -0.7248164415359497,
- "rewards/margins": 0.548802375793457,
- "rewards/rejected": -1.2736186981201172,
- "step": 470
  },
  {
  "epoch": 1.0,
- "step": 477,
  "total_flos": 0.0,
- "train_loss": 0.0564979040210352,
- "train_runtime": 4410.0999,
- "train_samples_per_second": 13.862,
- "train_steps_per_second": 0.108
  }
  ],
  "logging_steps": 10,
- "max_steps": 477,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "total_flos": 0.0,
 
  {
  "best_metric": null,
  "best_model_checkpoint": null,
+ "epoch": 0.9994340690435767,
+ "eval_steps": 1000,
+ "global_step": 883,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
  {
  "epoch": 0.0,
+ "learning_rate": 5.617977528089887e-09,
+ "logits/chosen": -2.8445301055908203,
+ "logits/rejected": -2.814356565475464,
+ "logps/chosen": -319.6065979003906,
+ "logps/rejected": -174.94183349609375,
+ "loss": 0.3715,
  "rewards/accuracies": 0.0,
  "rewards/chosen": 0.0,
  "rewards/margins": 0.0,
  "step": 1
  },
  {
+ "epoch": 0.01,
+ "learning_rate": 5.617977528089887e-08,
+ "logits/chosen": -2.8251900672912598,
+ "logits/rejected": -2.789383888244629,
+ "logps/chosen": -309.8087463378906,
+ "logps/rejected": -215.91148376464844,
+ "loss": 0.3422,
+ "rewards/accuracies": 0.3472222089767456,
+ "rewards/chosen": -1.3414668501354754e-05,
+ "rewards/margins": -0.0010301731526851654,
+ "rewards/rejected": 0.001016758382320404,
  "step": 10
  },
  {
+ "epoch": 0.02,
+ "learning_rate": 1.1235955056179774e-07,
+ "logits/chosen": -2.7988970279693604,
+ "logits/rejected": -2.761237382888794,
+ "logps/chosen": -393.90570068359375,
+ "logps/rejected": -215.60818481445312,
+ "loss": 0.3403,
+ "rewards/accuracies": 0.6499999761581421,
+ "rewards/chosen": 0.0024659852497279644,
+ "rewards/margins": 0.003918920643627644,
+ "rewards/rejected": -0.001452935510315001,
  "step": 20
  },
  {
+ "epoch": 0.03,
+ "learning_rate": 1.6853932584269663e-07,
+ "logits/chosen": -2.758561611175537,
+ "logits/rejected": -2.7206640243530273,
+ "logps/chosen": -338.9088134765625,
+ "logps/rejected": -169.10595703125,
+ "loss": 0.3345,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": 0.010817291215062141,
+ "rewards/margins": 0.01906611956655979,
+ "rewards/rejected": -0.0082488302141428,
  "step": 30
  },
  {
+ "epoch": 0.05,
+ "learning_rate": 2.2471910112359549e-07,
+ "logits/chosen": -2.7481541633605957,
+ "logits/rejected": -2.725372076034546,
+ "logps/chosen": -380.46087646484375,
+ "logps/rejected": -190.8077392578125,
+ "loss": 0.3464,
+ "rewards/accuracies": 0.7437499761581421,
+ "rewards/chosen": 0.03875667601823807,
+ "rewards/margins": 0.07445183396339417,
+ "rewards/rejected": -0.035695165395736694,
  "step": 40
  },
  {
+ "epoch": 0.06,
+ "learning_rate": 2.8089887640449437e-07,
+ "logits/chosen": -2.6264801025390625,
+ "logits/rejected": -2.6301627159118652,
+ "logps/chosen": -317.04522705078125,
+ "logps/rejected": -220.34957885742188,
+ "loss": 0.3512,
+ "rewards/accuracies": 0.5687500238418579,
+ "rewards/chosen": -0.008213408291339874,
+ "rewards/margins": 0.0798892229795456,
+ "rewards/rejected": -0.08810263872146606,
  "step": 50
  },
  {
+ "epoch": 0.07,
+ "learning_rate": 3.3707865168539325e-07,
+ "logits/chosen": -2.611682653427124,
+ "logits/rejected": -2.590343475341797,
+ "logps/chosen": -311.91217041015625,
+ "logps/rejected": -191.47933959960938,
+ "loss": 0.3306,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -0.010586333461105824,
+ "rewards/margins": 0.19717885553836823,
+ "rewards/rejected": -0.20776517689228058,
  "step": 60
  },
  {
+ "epoch": 0.08,
+ "learning_rate": 3.9325842696629214e-07,
+ "logits/chosen": -2.523609161376953,
+ "logits/rejected": -2.4986379146575928,
+ "logps/chosen": -387.2811279296875,
+ "logps/rejected": -237.1663818359375,
+ "loss": 0.2949,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -0.10780398547649384,
+ "rewards/margins": 0.32766008377075195,
+ "rewards/rejected": -0.4354640543460846,
  "step": 70
  },
  {
+ "epoch": 0.09,
+ "learning_rate": 4.4943820224719097e-07,
+ "logits/chosen": -2.5573625564575195,
+ "logits/rejected": -2.5448756217956543,
+ "logps/chosen": -370.834228515625,
+ "logps/rejected": -265.3526611328125,
+ "loss": 0.2472,
+ "rewards/accuracies": 0.612500011920929,
+ "rewards/chosen": -0.2603791356086731,
+ "rewards/margins": 0.3768497407436371,
+ "rewards/rejected": -0.6372288465499878,
  "step": 80
  },
  {
+ "epoch": 0.1,
+ "learning_rate": 4.999980431020109e-07,
+ "logits/chosen": -2.5590720176696777,
+ "logits/rejected": -2.5129566192626953,
+ "logps/chosen": -386.1466064453125,
+ "logps/rejected": -303.9127197265625,
+ "loss": 0.1973,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -0.5000510215759277,
+ "rewards/margins": 0.4384336471557617,
+ "rewards/rejected": -0.938484787940979,
  "step": 90
  },
  {
+ "epoch": 0.11,
+ "learning_rate": 4.997632524101301e-07,
+ "logits/chosen": -2.4931159019470215,
+ "logits/rejected": -2.468529224395752,
+ "logps/chosen": -443.2901306152344,
+ "logps/rejected": -325.82012939453125,
+ "loss": 0.1395,
  "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.6497722864151001,
+ "rewards/margins": 0.7651033997535706,
+ "rewards/rejected": -1.4148756265640259,
  "step": 100
  },
  {
+ "epoch": 0.12,
+ "learning_rate": 4.991375032514749e-07,
+ "logits/chosen": -2.4719808101654053,
+ "logits/rejected": -2.42130708694458,
+ "logps/chosen": -444.5228576660156,
+ "logps/rejected": -351.6055603027344,
+ "loss": 0.1286,
+ "rewards/accuracies": 0.6812499761581421,
+ "rewards/chosen": -0.9098852276802063,
+ "rewards/margins": 0.6541920900344849,
+ "rewards/rejected": -1.5640771389007568,
+ "step": 110
  },
  {
+ "epoch": 0.14,
+ "learning_rate": 4.98121775121344e-07,
+ "logits/chosen": -2.4348862171173096,
+ "logits/rejected": -2.4189703464508057,
+ "logps/chosen": -401.9959716796875,
+ "logps/rejected": -356.4914245605469,
+ "loss": 0.1242,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -0.8814595937728882,
+ "rewards/margins": 0.7036622762680054,
+ "rewards/rejected": -1.5851218700408936,
+ "step": 120
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 4.96717657955441e-07,
+ "logits/chosen": -2.432359218597412,
+ "logits/rejected": -2.3951916694641113,
+ "logps/chosen": -467.46258544921875,
+ "logps/rejected": -365.39935302734375,
+ "loss": 0.1171,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.7459506988525391,
+ "rewards/margins": 0.9494789242744446,
+ "rewards/rejected": -1.6954295635223389,
+ "step": 130
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 4.949273496411216e-07,
+ "logits/chosen": -2.4169604778289795,
+ "logits/rejected": -2.3435044288635254,
+ "logps/chosen": -457.04315185546875,
+ "logps/rejected": -351.8127136230469,
+ "loss": 0.1122,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -0.8486078381538391,
+ "rewards/margins": 0.9041606783866882,
+ "rewards/rejected": -1.7527685165405273,
+ "step": 140
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 4.927536525770046e-07,
+ "logits/chosen": -2.3999485969543457,
+ "logits/rejected": -2.3312039375305176,
+ "logps/chosen": -469.7005310058594,
+ "logps/rejected": -392.58245849609375,
+ "loss": 0.0875,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -1.270356297492981,
+ "rewards/margins": 0.873481273651123,
+ "rewards/rejected": -2.1438372135162354,
+ "step": 150
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 4.901999692863326e-07,
+ "logits/chosen": -2.3253886699676514,
+ "logits/rejected": -2.282778263092041,
+ "logps/chosen": -426.901611328125,
+ "logps/rejected": -402.61676025390625,
+ "loss": 0.0988,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.9533098936080933,
+ "rewards/margins": 1.0418169498443604,
+ "rewards/rejected": -1.995126724243164,
+ "step": 160
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 4.872702970909464e-07,
+ "logits/chosen": -2.2736876010894775,
+ "logits/rejected": -2.252612352371216,
+ "logps/chosen": -419.8299255371094,
+ "logps/rejected": -376.61456298828125,
+ "loss": 0.1007,
+ "rewards/accuracies": 0.737500011920929,
+ "rewards/chosen": -1.1481902599334717,
+ "rewards/margins": 0.816639244556427,
+ "rewards/rejected": -1.964829444885254,
+ "step": 170
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 4.839692218542131e-07,
+ "logits/chosen": -2.3057937622070312,
+ "logits/rejected": -2.2593424320220947,
+ "logps/chosen": -515.2421264648438,
+ "logps/rejected": -416.0797424316406,
+ "loss": 0.0914,
  "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -1.2467437982559204,
+ "rewards/margins": 1.0007789134979248,
+ "rewards/rejected": -2.2475228309631348,
+ "step": 180
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 4.803019108026997e-07,
+ "logits/chosen": -2.3358588218688965,
+ "logits/rejected": -2.2952218055725098,
+ "logps/chosen": -413.60198974609375,
+ "logps/rejected": -376.8982849121094,
+ "loss": 0.1101,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -0.8951161503791809,
+ "rewards/margins": 0.9150535464286804,
+ "rewards/rejected": -1.8101698160171509,
+ "step": 190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 4.7627410443782887e-07,
+ "logits/chosen": -2.278486490249634,
+ "logits/rejected": -2.231799364089966,
+ "logps/chosen": -481.43267822265625,
+ "logps/rejected": -418.29400634765625,
+ "loss": 0.088,
+ "rewards/accuracies": 0.706250011920929,
+ "rewards/chosen": -1.1549925804138184,
+ "rewards/margins": 0.9669499397277832,
+ "rewards/rejected": -2.1219422817230225,
+ "step": 200
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 4.7189210755018034e-07,
+ "logits/chosen": -2.2762014865875244,
+ "logits/rejected": -2.2429184913635254,
+ "logps/chosen": -479.207763671875,
+ "logps/rejected": -468.08319091796875,
+ "loss": 0.0741,
+ "rewards/accuracies": 0.7875000238418579,
+ "rewards/chosen": -1.3922666311264038,
+ "rewards/margins": 1.2790825366973877,
+ "rewards/rejected": -2.671349048614502,
+ "step": 210
  },
  {
  "epoch": 0.25,
+ "learning_rate": 4.671627793504988e-07,
+ "logits/chosen": -2.2565343379974365,
+ "logits/rejected": -2.201573610305786,
+ "logps/chosen": -496.92071533203125,
+ "logps/rejected": -468.92767333984375,
+ "loss": 0.0725,
  "rewards/accuracies": 0.762499988079071,
+ "rewards/chosen": -1.47738778591156,
+ "rewards/margins": 1.299922227859497,
+ "rewards/rejected": -2.7773098945617676,
+ "step": 220
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 4.6209352273286095e-07,
+ "logits/chosen": -2.2663636207580566,
+ "logits/rejected": -2.2125396728515625,
+ "logps/chosen": -488.028076171875,
+ "logps/rejected": -457.62615966796875,
+ "loss": 0.0718,
+ "rewards/accuracies": 0.7124999761581421,
+ "rewards/chosen": -1.5378353595733643,
+ "rewards/margins": 1.093570351600647,
+ "rewards/rejected": -2.6314055919647217,
+ "step": 230
  },
  {
  "epoch": 0.27,
+ "learning_rate": 4.56692272686805e-07,
+ "logits/chosen": -2.222353219985962,
+ "logits/rejected": -2.175215244293213,
+ "logps/chosen": -495.33294677734375,
+ "logps/rejected": -462.1837463378906,
+ "loss": 0.0534,
+ "rewards/accuracies": 0.75,
+ "rewards/chosen": -1.8188711404800415,
+ "rewards/margins": 1.0099431276321411,
+ "rewards/rejected": -2.8288140296936035,
+ "step": 240
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 4.5096748387656326e-07,
+ "logits/chosen": -2.1846108436584473,
+ "logits/rejected": -2.120591402053833,
+ "logps/chosen": -542.7911376953125,
+ "logps/rejected": -492.59613037109375,
+ "loss": 0.0444,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -2.2211267948150635,
+ "rewards/margins": 1.0178402662277222,
+ "rewards/rejected": -3.238966703414917,
+ "step": 250
  },
  {
  "epoch": 0.29,
+ "learning_rate": 4.4492811740683877e-07,
+ "logits/chosen": -2.1908137798309326,
+ "logits/rejected": -2.134983777999878,
+ "logps/chosen": -579.2075805664062,
+ "logps/rejected": -490.6512756347656,
+ "loss": 0.0416,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -2.233696460723877,
+ "rewards/margins": 0.9735918045043945,
+ "rewards/rejected": -3.2072887420654297,
+ "step": 260
  },
  {
  "epoch": 0.31,
+ "learning_rate": 4.3858362679584354e-07,
+ "logits/chosen": -2.219695568084717,
+ "logits/rejected": -2.162471294403076,
+ "logps/chosen": -540.0610961914062,
+ "logps/rejected": -505.3155822753906,
+ "loss": 0.0501,
+ "rewards/accuracies": 0.7124999761581421,
+ "rewards/chosen": -2.0654993057250977,
+ "rewards/margins": 1.0172322988510132,
+ "rewards/rejected": -3.082731246948242,
+ "step": 270
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 4.3194394317755245e-07,
+ "logits/chosen": -2.1336846351623535,
+ "logits/rejected": -2.112194538116455,
+ "logps/chosen": -507.14862060546875,
+ "logps/rejected": -446.50244140625,
+ "loss": 0.0488,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -1.9484211206436157,
+ "rewards/margins": 0.8502570986747742,
+ "rewards/rejected": -2.798678159713745,
+ "step": 280
  },
  {
  "epoch": 0.33,
+ "learning_rate": 4.2501945975633914e-07,
+ "logits/chosen": -2.2120940685272217,
+ "logits/rejected": -2.1599128246307373,
+ "logps/chosen": -565.96923828125,
+ "logps/rejected": -511.15875244140625,
+ "loss": 0.0673,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -1.7033636569976807,
+ "rewards/margins": 1.2502896785736084,
+ "rewards/rejected": -2.95365309715271,
+ "step": 290
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 4.1782101553832405e-07,
+ "logits/chosen": -2.229743480682373,
+ "logits/rejected": -2.16943621635437,
+ "logps/chosen": -531.8292846679688,
+ "logps/rejected": -456.9198303222656,
+ "loss": 0.0744,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -1.5255606174468994,
+ "rewards/margins": 1.3379647731781006,
+ "rewards/rejected": -2.863525867462158,
+ "step": 300
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 4.103598783649029e-07,
+ "logits/chosen": -2.1273748874664307,
+ "logits/rejected": -2.0621378421783447,
+ "logps/chosen": -449.0135803222656,
+ "logps/rejected": -464.1368103027344,
+ "loss": 0.0535,
+ "rewards/accuracies": 0.7437499761581421,
+ "rewards/chosen": -1.700299859046936,
+ "rewards/margins": 1.344228982925415,
+ "rewards/rejected": -3.0445287227630615,
+ "step": 310
  },
  {
  "epoch": 0.36,
+ "learning_rate": 4.026477272750119e-07,
+ "logits/chosen": -2.1758084297180176,
+ "logits/rejected": -2.1122355461120605,
+ "logps/chosen": -523.4859008789062,
+ "logps/rejected": -473.9483947753906,
+ "loss": 0.0521,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -1.8398901224136353,
+ "rewards/margins": 1.2361342906951904,
+ "rewards/rejected": -3.0760245323181152,
+ "step": 320
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 3.9469663422373864e-07,
+ "logits/chosen": -2.1758205890655518,
+ "logits/rejected": -2.1095774173736572,
+ "logps/chosen": -526.5242919921875,
+ "logps/rejected": -537.9935302734375,
+ "loss": 0.0567,
+ "rewards/accuracies": 0.706250011920929,
+ "rewards/chosen": -2.1948952674865723,
+ "rewards/margins": 1.110154390335083,
+ "rewards/rejected": -3.3050498962402344,
+ "step": 330
  },
  {
  "epoch": 0.38,
+ "learning_rate": 3.865190451858954e-07,
+ "logits/chosen": -2.1651651859283447,
+ "logits/rejected": -2.1235530376434326,
+ "logps/chosen": -530.256103515625,
+ "logps/rejected": -508.19073486328125,
+ "loss": 0.0525,
+ "rewards/accuracies": 0.762499988079071,
+ "rewards/chosen": -2.0087122917175293,
+ "rewards/margins": 1.3509318828582764,
+ "rewards/rejected": -3.3596444129943848,
+ "step": 340
  },
  {
  "epoch": 0.4,
+ "learning_rate": 3.781277606741327e-07,
+ "logits/chosen": -2.174168825149536,
+ "logits/rejected": -2.1008687019348145,
+ "logps/chosen": -523.4888916015625,
+ "logps/rejected": -506.63909912109375,
+ "loss": 0.0449,
+ "rewards/accuracies": 0.737500011920929,
+ "rewards/chosen": -2.164114475250244,
+ "rewards/margins": 1.164854884147644,
+ "rewards/rejected": -3.3289694786071777,
+ "step": 350
  },
  {
+ "epoch": 0.41,
+ "learning_rate": 3.6953591570208996e-07,
+ "logits/chosen": -2.13757586479187,
+ "logits/rejected": -2.0624773502349854,
+ "logps/chosen": -568.0411376953125,
+ "logps/rejected": -503.7386169433594,
+ "loss": 0.0426,
+ "rewards/accuracies": 0.65625,
+ "rewards/chosen": -2.1451027393341064,
+ "rewards/margins": 1.2416441440582275,
+ "rewards/rejected": -3.386746883392334,
+ "step": 360
  },
  {
  "epoch": 0.42,
+ "learning_rate": 3.607569592239452e-07,
+ "logits/chosen": -2.1109542846679688,
+ "logits/rejected": -2.0401806831359863,
+ "logps/chosen": -550.1793212890625,
+ "logps/rejected": -503.6466369628906,
+ "loss": 0.0482,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -1.9328603744506836,
+ "rewards/margins": 1.231170892715454,
+ "rewards/rejected": -3.1640312671661377,
+ "step": 370
  },
  {
+ "epoch": 0.43,
+ "learning_rate": 3.518046330825494e-07,
+ "logits/chosen": -2.1060791015625,
+ "logits/rejected": -2.041074514389038,
+ "logps/chosen": -535.5892944335938,
+ "logps/rejected": -487.30352783203125,
+ "loss": 0.0552,
  "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -1.9271209239959717,
+ "rewards/margins": 1.0428855419158936,
+ "rewards/rejected": -2.9700064659118652,
+ "step": 380
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 3.4269295049909713e-07,
+ "logits/chosen": -2.175748109817505,
+ "logits/rejected": -2.112736701965332,
+ "logps/chosen": -584.9471435546875,
+ "logps/rejected": -521.4114379882812,
+ "loss": 0.0535,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -1.8646738529205322,
+ "rewards/margins": 1.4181945323944092,
+ "rewards/rejected": -3.2828681468963623,
+ "step": 390
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 3.3343617413800453e-07,
+ "logits/chosen": -2.0954391956329346,
+ "logits/rejected": -2.029520034790039,
+ "logps/chosen": -522.0698852539062,
+ "logps/rejected": -517.2570190429688,
+ "loss": 0.0433,
+ "rewards/accuracies": 0.8187500238418579,
+ "rewards/chosen": -2.0321173667907715,
+ "rewards/margins": 1.4304258823394775,
+ "rewards/rejected": -3.46254301071167,
+ "step": 400
  },
  {
  "epoch": 0.46,
+ "learning_rate": 3.2404879378132893e-07,
+ "logits/chosen": -2.101567029953003,
+ "logits/rejected": -2.044337511062622,
+ "logps/chosen": -559.70458984375,
+ "logps/rejected": -565.7550048828125,
+ "loss": 0.0561,
+ "rewards/accuracies": 0.7562500238418579,
+ "rewards/chosen": -2.1665139198303223,
+ "rewards/margins": 1.347381591796875,
+ "rewards/rejected": -3.5138955116271973,
+ "step": 410
  },
  {
  "epoch": 0.48,
+ "learning_rate": 3.1454550364767894e-07,
+ "logits/chosen": -2.1945436000823975,
+ "logits/rejected": -2.108241558074951,
+ "logps/chosen": -547.8588256835938,
+ "logps/rejected": -472.13055419921875,
+ "loss": 0.0682,
+ "rewards/accuracies": 0.8125,
+ "rewards/chosen": -1.5657107830047607,
+ "rewards/margins": 1.3424267768859863,
+ "rewards/rejected": -2.908137798309326,
+ "step": 420
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 3.049411793911154e-07,
+ "logits/chosen": -2.200591564178467,
+ "logits/rejected": -2.147153615951538,
+ "logps/chosen": -558.2203369140625,
+ "logps/rejected": -518.0502319335938,
+ "loss": 0.0599,
+ "rewards/accuracies": 0.8125,
+ "rewards/chosen": -1.8033355474472046,
+ "rewards/margins": 1.3239552974700928,
+ "rewards/rejected": -3.127290725708008,
+ "step": 430
  },
  {
  "epoch": 0.5,
+ "learning_rate": 2.9525085481604914e-07,
+ "logits/chosen": -2.055802345275879,
+ "logits/rejected": -2.034162998199463,
+ "logps/chosen": -532.451416015625,
+ "logps/rejected": -547.7064819335938,
+ "loss": 0.0414,
+ "rewards/accuracies": 0.71875,
+ "rewards/chosen": -2.4992823600769043,
+ "rewards/margins": 0.9650812149047852,
+ "rewards/rejected": -3.4643635749816895,
+ "step": 440
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 2.854896983445833e-07,
+ "logits/chosen": -2.0957014560699463,
+ "logits/rejected": -2.0423591136932373,
+ "logps/chosen": -601.10302734375,
+ "logps/rejected": -567.8006591796875,
+ "loss": 0.0359,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -2.637305736541748,
+ "rewards/margins": 1.1463762521743774,
+ "rewards/rejected": -3.783681869506836,
+ "step": 450
  },
  {
  "epoch": 0.52,
+ "learning_rate": 2.7567298927313654e-07,
+ "logits/chosen": -2.131615400314331,
+ "logits/rejected": -2.082598924636841,
+ "logps/chosen": -597.11474609375,
+ "logps/rejected": -561.5462646484375,
+ "loss": 0.043,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -2.3780384063720703,
+ "rewards/margins": 1.2381227016448975,
+ "rewards/rejected": -3.6161608695983887,
+ "step": 460
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 2.658160938555123e-07,
+ "logits/chosen": -2.093458890914917,
+ "logits/rejected": -2.061420440673828,
+ "logps/chosen": -515.4798583984375,
+ "logps/rejected": -517.3043212890625,
+ "loss": 0.0434,
+ "rewards/accuracies": 0.762499988079071,
+ "rewards/chosen": -2.0862958431243896,
+ "rewards/margins": 1.3371632099151611,
+ "rewards/rejected": -3.42345929145813,
+ "step": 470
  },
  {
  "epoch": 0.54,
+ "learning_rate": 2.559344412498532e-07,
+ "logits/chosen": -2.1327083110809326,
+ "logits/rejected": -2.0735764503479004,
+ "logps/chosen": -521.3518676757812,
+ "logps/rejected": -507.07940673828125,
+ "loss": 0.0493,
+ "rewards/accuracies": 0.706250011920929,
+ "rewards/chosen": -2.167259931564331,
+ "rewards/margins": 0.961189866065979,
+ "rewards/rejected": -3.1284494400024414,
+ "step": 480
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 2.460434993671294e-07,
+ "logits/chosen": -2.1041388511657715,
+ "logits/rejected": -2.0494048595428467,
+ "logps/chosen": -468.25286865234375,
+ "logps/rejected": -456.40069580078125,
+ "loss": 0.0565,
+ "rewards/accuracies": 0.78125,
+ "rewards/chosen": -1.6431152820587158,
+ "rewards/margins": 1.1550475358963013,
+ "rewards/rejected": -2.7981629371643066,
+ "step": 490
  },
  {
  "epoch": 0.57,
+ "learning_rate": 2.361587506589672e-07,
+ "logits/chosen": -2.136615514755249,
+ "logits/rejected": -2.0756921768188477,
+ "logps/chosen": -559.8421630859375,
+ "logps/rejected": -500.45416259765625,
+ "loss": 0.0555,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -2.011848211288452,
+ "rewards/margins": 1.14077889919281,
+ "rewards/rejected": -3.1526272296905518,
+ "step": 500
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 2.2629566788271613e-07,
+ "logits/chosen": -2.1114137172698975,
+ "logits/rejected": -2.0390002727508545,
+ "logps/chosen": -583.687255859375,
+ "logps/rejected": -564.6419067382812,
+ "loss": 0.0469,
+ "rewards/accuracies": 0.75,
+ "rewards/chosen": -2.1826224327087402,
+ "rewards/margins": 1.3830513954162598,
+ "rewards/rejected": -3.565673828125,
+ "step": 510
  },
  {
  "epoch": 0.59,
+ "learning_rate": 2.1646968988169135e-07,
+ "logits/chosen": -2.100142240524292,
+ "logits/rejected": -2.0500102043151855,
+ "logps/chosen": -592.74755859375,
+ "logps/rejected": -565.3260498046875,
+ "loss": 0.0453,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -2.248453378677368,
+ "rewards/margins": 1.5243793725967407,
+ "rewards/rejected": -3.7728323936462402,
+ "step": 520
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 2.0669619741850232e-07,
+ "logits/chosen": -2.0300328731536865,
+ "logits/rejected": -1.9945085048675537,
+ "logps/chosen": -557.4923095703125,
+ "logps/rejected": -565.3587646484375,
+ "loss": 0.0414,
+ "rewards/accuracies": 0.75,
+ "rewards/chosen": -2.316817045211792,
+ "rewards/margins": 1.3874229192733765,
+ "rewards/rejected": -3.704240083694458,
+ "step": 530
  },
  {
  "epoch": 0.61,
+ "learning_rate": 1.9699048909929518e-07,
+ "logits/chosen": -2.041743040084839,
+ "logits/rejected": -2.026899814605713,
+ "logps/chosen": -549.0650634765625,
+ "logps/rejected": -553.6836547851562,
+ "loss": 0.0385,
+ "rewards/accuracies": 0.78125,
+ "rewards/chosen": -2.206735134124756,
+ "rewards/margins": 1.487671136856079,
+ "rewards/rejected": -3.6944069862365723,
+ "step": 540
  },
  {
+ "epoch": 0.62,
+ "learning_rate": 1.8736775742659732e-07,
+ "logits/chosen": -2.042382001876831,
+ "logits/rejected": -1.9802138805389404,
+ "logps/chosen": -505.1175842285156,
+ "logps/rejected": -516.2354736328125,
+ "loss": 0.0492,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -2.0931038856506348,
+ "rewards/margins": 1.3815913200378418,
+ "rewards/rejected": -3.4746952056884766,
+ "step": 550
  },
  {
  "epoch": 0.63,
+ "learning_rate": 1.7784306501824616e-07,
+ "logits/chosen": -2.0194315910339355,
+ "logits/rejected": -1.9467071294784546,
+ "logps/chosen": -560.6709594726562,
+ "logps/rejected": -534.3895874023438,
+ "loss": 0.0542,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -2.101309299468994,
+ "rewards/margins": 1.3114370107650757,
+ "rewards/rejected": -3.412745952606201,
+ "step": 560
  },
  {
  "epoch": 0.65,
+ "learning_rate": 1.6843132102963025e-07,
+ "logits/chosen": -2.1281192302703857,
+ "logits/rejected": -2.0448033809661865,
+ "logps/chosen": -538.1578369140625,
+ "logps/rejected": -523.7766723632812,
+ "loss": 0.0553,
+ "rewards/accuracies": 0.824999988079071,
+ "rewards/chosen": -1.8923267126083374,
+ "rewards/margins": 1.5170024633407593,
+ "rewards/rejected": -3.4093291759490967,
+ "step": 570
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 1.591472578161458e-07,
+ "logits/chosen": -2.052722454071045,
+ "logits/rejected": -2.037425994873047,
+ "logps/chosen": -544.02197265625,
+ "logps/rejected": -537.7569580078125,
+ "loss": 0.0538,
+ "rewards/accuracies": 0.7749999761581421,
+ "rewards/chosen": -2.122720241546631,
+ "rewards/margins": 1.2477385997772217,
+ "rewards/rejected": -3.3704590797424316,
+ "step": 580
  },
  {
  "epoch": 0.67,
+ "learning_rate": 1.5000540787240274e-07,
+ "logits/chosen": -2.1312813758850098,
+ "logits/rejected": -2.0585029125213623,
+ "logps/chosen": -578.3094482421875,
+ "logps/rejected": -522.5347900390625,
+ "loss": 0.0505,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -2.313297986984253,
+ "rewards/margins": 1.069830298423767,
+ "rewards/rejected": -3.3831279277801514,
+ "step": 590
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 1.410200810842749e-07,
+ "logits/chosen": -2.0706946849823,
+ "logits/rejected": -2.0253984928131104,
+ "logps/chosen": -567.4093627929688,
+ "logps/rejected": -551.6956787109375,
+ "loss": 0.0423,
+ "rewards/accuracies": 0.793749988079071,
+ "rewards/chosen": -2.204864025115967,
+ "rewards/margins": 1.2818561792373657,
+ "rewards/rejected": -3.486720561981201,
+ "step": 600
  },
  {
  "epoch": 0.69,
+ "learning_rate": 1.322053423294041e-07,
+ "logits/chosen": -2.1002461910247803,
+ "logits/rejected": -2.0568830966949463,
+ "logps/chosen": -551.1517333984375,
+ "logps/rejected": -533.7266235351562,
+ "loss": 0.049,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -2.393352746963501,
+ "rewards/margins": 1.0419074296951294,
+ "rewards/rejected": -3.4352595806121826,
+ "step": 610
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 1.2357498946121905e-07,
+ "logits/chosen": -2.088925838470459,
+ "logits/rejected": -2.03609037399292,
+ "logps/chosen": -612.4100952148438,
+ "logps/rejected": -554.9931640625,
+ "loss": 0.047,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -2.3001351356506348,
+ "rewards/margins": 1.3683874607086182,
+ "rewards/rejected": -3.668523073196411,
+ "step": 620
  },
  {
  "epoch": 0.71,
+ "learning_rate": 1.1514253171093161e-07,
+ "logits/chosen": -2.1326541900634766,
+ "logits/rejected": -2.0758585929870605,
+ "logps/chosen": -570.8392333984375,
+ "logps/rejected": -574.4908447265625,
+ "loss": 0.0533,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -2.2075510025024414,
+ "rewards/margins": 1.380297064781189,
+ "rewards/rejected": -3.587847948074341,
+ "step": 630
  },
  {
+ "epoch": 0.72,
+ "learning_rate": 1.0692116854131883e-07,
+ "logits/chosen": -2.0427284240722656,
+ "logits/rejected": -1.9922561645507812,
+ "logps/chosen": -528.8707275390625,
+ "logps/rejected": -516.8524169921875,
+ "loss": 0.0486,
+ "rewards/accuracies": 0.768750011920929,
+ "rewards/chosen": -2.0978333950042725,
+ "rewards/margins": 1.2833999395370483,
+ "rewards/rejected": -3.3812332153320312,
+ "step": 640
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 9.89237689853889e-08,
+ "logits/chosen": -2.1081204414367676,
+ "logits/rejected": -2.0486502647399902,
926
+ "logps/chosen": -574.6364135742188,
927
+ "logps/rejected": -567.418212890625,
928
+ "loss": 0.0473,
929
+ "rewards/accuracies": 0.800000011920929,
930
+ "rewards/chosen": -2.1617255210876465,
931
+ "rewards/margins": 1.530967354774475,
932
+ "rewards/rejected": -3.692692995071411,
933
+ "step": 650
934
  },
935
  {
936
  "epoch": 0.75,
937
+ "learning_rate": 9.11628515022765e-08,
938
+ "logits/chosen": -2.0846991539001465,
939
+ "logits/rejected": -2.0398693084716797,
940
+ "logps/chosen": -597.761962890625,
941
+ "logps/rejected": -579.8795166015625,
942
+ "loss": 0.0522,
943
+ "rewards/accuracies": 0.78125,
944
+ "rewards/chosen": -2.5013487339019775,
945
+ "rewards/margins": 1.258310079574585,
946
+ "rewards/rejected": -3.7596592903137207,
947
+ "step": 660
948
+ },
949
+ {
950
+ "epoch": 0.76,
951
+ "learning_rate": 8.365056438189486e-08,
952
+ "logits/chosen": -2.1001641750335693,
953
+ "logits/rejected": -2.0509486198425293,
954
+ "logps/chosen": -590.825439453125,
955
+ "logps/rejected": -565.3231201171875,
956
+ "loss": 0.0481,
957
+ "rewards/accuracies": 0.7124999761581421,
958
+ "rewards/chosen": -2.456451654434204,
959
+ "rewards/margins": 1.0512244701385498,
960
+ "rewards/rejected": -3.507675886154175,
961
+ "step": 670
962
  },
963
  {
964
  "epoch": 0.77,
965
+ "learning_rate": 7.639866672902101e-08,
966
+ "logits/chosen": -2.059082269668579,
967
+ "logits/rejected": -2.0069167613983154,
968
+ "logps/chosen": -529.4530029296875,
969
+ "logps/rejected": -553.0791015625,
970
+ "loss": 0.0537,
971
+ "rewards/accuracies": 0.8125,
972
+ "rewards/chosen": -2.3729891777038574,
973
+ "rewards/margins": 1.3166040182113647,
974
+ "rewards/rejected": -3.6895930767059326,
975
+ "step": 680
976
+ },
977
+ {
978
+ "epoch": 0.78,
979
+ "learning_rate": 6.941851005657851e-08,
980
+ "logits/chosen": -2.059549570083618,
981
+ "logits/rejected": -1.993088722229004,
982
+ "logps/chosen": -531.9309692382812,
983
+ "logps/rejected": -547.8716430664062,
984
+ "loss": 0.0507,
985
+ "rewards/accuracies": 0.78125,
986
+ "rewards/chosen": -2.190737247467041,
987
+ "rewards/margins": 1.2514276504516602,
988
+ "rewards/rejected": -3.442164897918701,
989
+ "step": 690
990
+ },
991
+ {
992
+ "epoch": 0.79,
993
+ "learning_rate": 6.272102051693051e-08,
994
+ "logits/chosen": -2.078123092651367,
995
+ "logits/rejected": -2.0465798377990723,
996
+ "logps/chosen": -528.0089721679688,
997
+ "logps/rejected": -525.1822509765625,
998
+ "loss": 0.0489,
999
+ "rewards/accuracies": 0.7437499761581421,
1000
+ "rewards/chosen": -2.0736889839172363,
1001
+ "rewards/margins": 1.2697416543960571,
1002
+ "rewards/rejected": -3.343430995941162,
1003
+ "step": 700
1004
  },
1005
  {
1006
  "epoch": 0.8,
1007
+ "learning_rate": 5.6316681798995844e-08,
1008
+ "logits/chosen": -2.1082401275634766,
1009
+ "logits/rejected": -2.036336898803711,
1010
+ "logps/chosen": -535.8514404296875,
1011
+ "logps/rejected": -538.2098388671875,
1012
+ "loss": 0.0525,
1013
+ "rewards/accuracies": 0.800000011920929,
1014
+ "rewards/chosen": -2.207425594329834,
1015
+ "rewards/margins": 1.2546026706695557,
1016
+ "rewards/rejected": -3.4620280265808105,
1017
+ "step": 710
1018
  },
1019
  {
1020
+ "epoch": 0.81,
1021
+ "learning_rate": 5.0215518717961256e-08,
1022
+ "logits/chosen": -2.0293445587158203,
1023
+ "logits/rejected": -2.0118401050567627,
1024
+ "logps/chosen": -534.4603271484375,
1025
+ "logps/rejected": -497.52862548828125,
1026
+ "loss": 0.0513,
1027
+ "rewards/accuracies": 0.831250011920929,
1028
+ "rewards/chosen": -1.9929277896881104,
1029
+ "rewards/margins": 1.2197530269622803,
1030
+ "rewards/rejected": -3.2126808166503906,
1031
+ "step": 720
1032
+ },
1033
+ {
1034
+ "epoch": 0.83,
1035
+ "learning_rate": 4.4427081523275925e-08,
1036
+ "logits/chosen": -2.111539363861084,
1037
+ "logits/rejected": -2.0251288414001465,
1038
+ "logps/chosen": -571.8197021484375,
1039
+ "logps/rejected": -563.7077026367188,
1040
+ "loss": 0.0537,
1041
  "rewards/accuracies": 0.7437499761581421,
1042
+ "rewards/chosen": -2.099212646484375,
1043
+ "rewards/margins": 1.648705244064331,
1044
+ "rewards/rejected": -3.747917890548706,
1045
+ "step": 730
1046
  },
1047
  {
1048
  "epoch": 0.84,
1049
+ "learning_rate": 3.896043094949061e-08,
1050
+ "logits/chosen": -2.112701654434204,
1051
+ "logits/rejected": -2.0454134941101074,
1052
+ "logps/chosen": -555.4544067382812,
1053
+ "logps/rejected": -528.0631713867188,
1054
+ "loss": 0.0544,
1055
+ "rewards/accuracies": 0.75,
1056
+ "rewards/chosen": -2.158057451248169,
1057
+ "rewards/margins": 1.1910467147827148,
1058
+ "rewards/rejected": -3.3491039276123047,
1059
+ "step": 740
1060
  },
1061
  {
1062
+ "epoch": 0.85,
1063
+ "learning_rate": 3.3824124033343557e-08,
1064
+ "logits/chosen": -2.101064682006836,
1065
+ "logits/rejected": -2.035494089126587,
1066
+ "logps/chosen": -523.05517578125,
1067
+ "logps/rejected": -508.50653076171875,
1068
+ "loss": 0.0473,
1069
+ "rewards/accuracies": 0.7562500238418579,
1070
+ "rewards/chosen": -2.1078267097473145,
1071
+ "rewards/margins": 1.258143424987793,
1072
+ "rewards/rejected": -3.3659701347351074,
1073
+ "step": 750
 
 
1074
  },
1075
  {
1076
  "epoch": 0.86,
1077
+ "learning_rate": 2.9026200719291904e-08,
1078
+ "logits/chosen": -2.0927655696868896,
1079
+ "logits/rejected": -2.014799118041992,
1080
+ "logps/chosen": -526.6090087890625,
1081
+ "logps/rejected": -510.7976989746094,
1082
+ "loss": 0.0476,
1083
+ "rewards/accuracies": 0.8187500238418579,
1084
+ "rewards/chosen": -2.0697100162506104,
1085
+ "rewards/margins": 1.2819905281066895,
1086
+ "rewards/rejected": -3.3517003059387207,
1087
+ "step": 760
1088
+ },
1089
+ {
1090
+ "epoch": 0.87,
1091
+ "learning_rate": 2.4574171274456433e-08,
1092
+ "logits/chosen": -2.127093553543091,
1093
+ "logits/rejected": -2.072615146636963,
1094
+ "logps/chosen": -579.9869995117188,
1095
+ "logps/rejected": -531.943359375,
1096
+ "loss": 0.0517,
1097
+ "rewards/accuracies": 0.7562500238418579,
1098
+ "rewards/chosen": -2.103449583053589,
1099
+ "rewards/margins": 1.3205053806304932,
1100
+ "rewards/rejected": -3.423954725265503,
1101
+ "step": 770
1102
  },
1103
  {
1104
  "epoch": 0.88,
1105
+ "learning_rate": 2.047500453267881e-08,
1106
+ "logits/chosen": -2.0992608070373535,
1107
+ "logits/rejected": -2.035027265548706,
1108
+ "logps/chosen": -576.8822021484375,
1109
+ "logps/rejected": -549.5172119140625,
1110
+ "loss": 0.0442,
1111
+ "rewards/accuracies": 0.7437499761581421,
1112
+ "rewards/chosen": -2.2390971183776855,
1113
+ "rewards/margins": 1.385114073753357,
1114
+ "rewards/rejected": -3.624211072921753,
1115
+ "step": 780
1116
  },
1117
  {
1118
+ "epoch": 0.89,
1119
+ "learning_rate": 1.673511698609292e-08,
1120
+ "logits/chosen": -2.1341183185577393,
1121
+ "logits/rejected": -2.060023784637451,
1122
+ "logps/chosen": -565.7227172851562,
1123
+ "logps/rejected": -535.3021850585938,
1124
+ "loss": 0.0475,
1125
+ "rewards/accuracies": 0.75,
1126
+ "rewards/chosen": -2.2470290660858154,
1127
+ "rewards/margins": 1.2771549224853516,
1128
+ "rewards/rejected": -3.524184465408325,
1129
+ "step": 790
1130
+ },
1131
+ {
1132
+ "epoch": 0.91,
1133
+ "learning_rate": 1.3360362741285769e-08,
1134
+ "logits/chosen": -2.0838260650634766,
1135
+ "logits/rejected": -2.0251381397247314,
1136
+ "logps/chosen": -578.9539794921875,
1137
+ "logps/rejected": -542.1248779296875,
1138
+ "loss": 0.0468,
1139
+ "rewards/accuracies": 0.6875,
1140
+ "rewards/chosen": -2.277639865875244,
1141
+ "rewards/margins": 1.2334930896759033,
1142
+ "rewards/rejected": -3.5111324787139893,
1143
+ "step": 800
1144
  },
1145
  {
1146
  "epoch": 0.92,
1147
+ "learning_rate": 1.0356024355769433e-08,
1148
+ "logits/chosen": -2.093954086303711,
1149
+ "logits/rejected": -2.043708324432373,
1150
+ "logps/chosen": -584.259521484375,
1151
+ "logps/rejected": -580.5767211914062,
1152
+ "loss": 0.0458,
1153
+ "rewards/accuracies": 0.768750011920929,
1154
+ "rewards/chosen": -2.1630260944366455,
1155
+ "rewards/margins": 1.6276559829711914,
1156
+ "rewards/rejected": -3.790682315826416,
1157
+ "step": 810
1158
+ },
1159
+ {
1160
+ "epoch": 0.93,
1161
+ "learning_rate": 7.726804569108597e-09,
1162
+ "logits/chosen": -2.112128973007202,
1163
+ "logits/rejected": -2.0508077144622803,
1164
+ "logps/chosen": -558.3360595703125,
1165
+ "logps/rejected": -544.3916015625,
1166
+ "loss": 0.0469,
1167
+ "rewards/accuracies": 0.793749988079071,
1168
+ "rewards/chosen": -2.1536641120910645,
1169
+ "rewards/margins": 1.3970838785171509,
1170
+ "rewards/rejected": -3.5507476329803467,
1171
+ "step": 820
1172
  },
1173
  {
1174
  "epoch": 0.94,
1175
+ "learning_rate": 5.476818941645561e-09,
1176
+ "logits/chosen": -2.100653648376465,
1177
+ "logits/rejected": -2.021291732788086,
1178
+ "logps/chosen": -588.8988037109375,
1179
+ "logps/rejected": -540.7061767578125,
1180
+ "loss": 0.0429,
1181
+ "rewards/accuracies": 0.7437499761581421,
1182
+ "rewards/chosen": -2.19673490524292,
1183
+ "rewards/margins": 1.4355782270431519,
1184
+ "rewards/rejected": -3.632312774658203,
1185
+ "step": 830
1186
+ },
1187
+ {
1188
+ "epoch": 0.95,
1189
+ "learning_rate": 3.609589412347347e-09,
1190
+ "logits/chosen": -2.109170913696289,
1191
+ "logits/rejected": -2.0334959030151367,
1192
+ "logps/chosen": -578.6869506835938,
1193
+ "logps/rejected": -536.8270263671875,
1194
+ "loss": 0.0477,
1195
+ "rewards/accuracies": 0.737500011920929,
1196
+ "rewards/chosen": -2.1644272804260254,
1197
+ "rewards/margins": 1.3395423889160156,
1198
+ "rewards/rejected": -3.50396990776062,
1199
+ "step": 840
1200
  },
1201
  {
1202
  "epoch": 0.96,
1203
+ "learning_rate": 2.1280387858572667e-09,
1204
+ "logits/chosen": -2.0832810401916504,
1205
+ "logits/rejected": -2.0087838172912598,
1206
+ "logps/chosen": -581.9695434570312,
1207
+ "logps/rejected": -542.6390380859375,
1208
+ "loss": 0.0409,
1209
+ "rewards/accuracies": 0.75,
1210
+ "rewards/chosen": -2.3538167476654053,
1211
+ "rewards/margins": 1.3839699029922485,
1212
+ "rewards/rejected": -3.737786054611206,
1213
+ "step": 850
1214
+ },
1215
+ {
1216
+ "epoch": 0.97,
1217
+ "learning_rate": 1.03448615738172e-09,
1218
+ "logits/chosen": -2.070016384124756,
1219
+ "logits/rejected": -1.9977195262908936,
1220
+ "logps/chosen": -577.4140625,
1221
+ "logps/rejected": -548.4049682617188,
1222
+ "loss": 0.0434,
1223
+ "rewards/accuracies": 0.7437499761581421,
1224
+ "rewards/chosen": -2.305614709854126,
1225
+ "rewards/margins": 1.2134736776351929,
1226
+ "rewards/rejected": -3.51908802986145,
1227
+ "step": 860
1228
  },
1229
  {
1230
  "epoch": 0.98,
1231
+ "learning_rate": 3.3064328257259575e-10,
1232
+ "logits/chosen": -2.05591082572937,
1233
+ "logits/rejected": -2.0076966285705566,
1234
+ "logps/chosen": -535.1695556640625,
1235
+ "logps/rejected": -527.5564575195312,
1236
+ "loss": 0.0505,
1237
+ "rewards/accuracies": 0.706250011920929,
1238
+ "rewards/chosen": -2.3522868156433105,
1239
+ "rewards/margins": 1.0536117553710938,
1240
+ "rewards/rejected": -3.4058985710144043,
1241
+ "step": 870
1242
+ },
1243
+ {
1244
+ "epoch": 1.0,
1245
+ "learning_rate": 1.7611898088715216e-11,
1246
+ "logits/chosen": -2.0798163414001465,
1247
+ "logits/rejected": -2.035545825958252,
1248
+ "logps/chosen": -553.0671997070312,
1249
+ "logps/rejected": -526.8216552734375,
1250
+ "loss": 0.0432,
1251
+ "rewards/accuracies": 0.737500011920929,
1252
+ "rewards/chosen": -2.1250925064086914,
1253
+ "rewards/margins": 1.3694770336151123,
1254
+ "rewards/rejected": -3.4945693016052246,
1255
+ "step": 880
1256
  },
1257
  {
1258
  "epoch": 1.0,
1259
+ "step": 883,
1260
  "total_flos": 0.0,
1261
+ "train_loss": 0.08422429972704783,
1262
+ "train_runtime": 6988.5729,
1263
+ "train_samples_per_second": 16.173,
1264
+ "train_steps_per_second": 0.126
1265
  }
1266
  ],
1267
  "logging_steps": 10,
1268
+ "max_steps": 883,
1269
  "num_train_epochs": 1,
1270
  "save_steps": 1000,
1271
  "total_flos": 0.0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:33b5326d4ea988e8ae29143a1ab2dfc0048f16de81f7d0de6e419d974560da26
+ oid sha256:d532f4704c9f808c3ea9e327a50c66a63a95927fe15fa88c50065901d0980f26
  size 5944