n1ck-guo committed (verified)
Commit dda3d2d · 1 Parent(s): dbffddd

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ model.safetensors.index.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "</think>": 151668,
+   "</tool_call>": 151658,
+   "</tool_response>": 151666,
+   "<think>": 151667,
+   "<tool_call>": 151657,
+   "<tool_response>": 151665,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
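These are the Qwen-family control tokens: the chat markers, tool-calling tags, vision placeholders, and the <think>/</think> reasoning delimiters. A minimal sketch to sanity-check the mapping after download (the local path is a placeholder for wherever this folder is fetched):

    from transformers import AutoTokenizer

    # Placeholder path: point this at the downloaded snapshot of this repo.
    tok = AutoTokenizer.from_pretrained("./qwen3-next-autoround")

    # Each entry in added_tokens.json should round-trip to the listed ID.
    assert tok.convert_tokens_to_ids("<think>") == 151667
    assert tok.convert_tokens_to_ids("<|im_end|>") == 151645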
chat_template.jinja ADDED
@@ -0,0 +1,86 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- messages[0].content + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+ {%- for message in messages[::-1] %}
+ {%- set index = (messages|length - 1) - loop.index0 %}
+ {%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+ {%- set ns.multi_step_tool = false %}
+ {%- set ns.last_query_index = index %}
+ {%- endif %}
+ {%- endfor %}
+ {%- for message in messages %}
+ {%- if message.content is string %}
+ {%- set content = message.content %}
+ {%- else %}
+ {%- set content = '' %}
+ {%- endif %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {%- set reasoning_content = '' %}
+ {%- if message.reasoning_content is string %}
+ {%- set reasoning_content = message.reasoning_content %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_query_index %}
+ {%- if loop.last or (not loop.last and reasoning_content) %}
+ {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if (loop.first and content) or (not loop.first) %}
+ {{- '\n' }}
+ {%- endif %}
+ {%- if tool_call.function %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {%- if tool_call.arguments is string %}
+ {{- tool_call.arguments }}
+ {%- else %}
+ {{- tool_call.arguments | tojson }}
+ {%- endif %}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
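The template above is what tokenizer.apply_chat_template renders: turns are wrapped in <|im_start|>/<|im_end|>, tool signatures are injected into the system block when tools is passed, and <think> reasoning is kept only for assistant turns after the last user query. A minimal usage sketch (model path again a placeholder):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./qwen3-next-autoround")  # placeholder path

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is 2 + 2?"},
    ]

    # Renders the Jinja template above; add_generation_prompt appends the
    # trailing '<|im_start|>assistant\n' so the model starts its reply.
    prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    print(prompt)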
config.json ADDED
@@ -0,0 +1,862 @@
+ {
+   "architectures": [
+     "Qwen3NextForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "decoder_sparse_step": 1,
+   "dtype": "bfloat16",
+   "eos_token_id": 151645,
+   "full_attention_interval": 4,
+   "head_dim": 256,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 5120,
+   "layer_types": [
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention",
+     "linear_attention",
+     "linear_attention",
+     "linear_attention",
+     "full_attention"
+   ],
+   "linear_conv_kernel_dim": 4,
+   "linear_key_head_dim": 128,
+   "linear_num_key_heads": 16,
+   "linear_num_value_heads": 32,
+   "linear_value_head_dim": 128,
+   "max_position_embeddings": 262144,
+   "mlp_only_layers": [],
+   "model_type": "qwen3_next",
+   "moe_intermediate_size": 512,
+   "norm_topk_prob": true,
+   "num_attention_heads": 16,
+   "num_experts": 512,
+   "num_experts_per_tok": 10,
+   "num_hidden_layers": 48,
+   "num_key_value_heads": 2,
+   "output_router_logits": false,
+   "partial_rotary_factor": 0.25,
+   "quantization_config": {
+     "autoround_version": "0.7.0",
+     "bits": 4,
+     "data_type": "int",
+     "extra_config": {
+       "model.layers.0.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.0.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.0.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.0.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.0.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.1.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.1.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.1.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.1.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.1.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.10.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.10.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.10.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.10.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.10.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.11.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.11.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.11.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.11.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.11.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.11.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.12.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.12.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.12.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.12.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.12.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.13.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.13.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.13.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.13.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.13.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.14.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.14.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.14.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.14.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.14.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.15.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.15.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.15.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.15.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.15.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.15.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.16.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.16.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.16.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.16.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.16.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.17.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.17.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.17.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.17.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.17.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.18.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.18.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.18.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.18.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.18.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.19.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.19.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.19.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.19.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.19.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.19.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.2.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.2.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.2.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.2.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.2.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.20.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.20.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.20.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.20.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.20.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.21.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.21.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.21.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.21.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.21.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.22.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.22.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.22.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.22.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.22.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.23.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.23.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.23.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.23.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.23.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.23.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.24.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.24.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.24.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.24.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.24.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.25.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.25.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.25.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.25.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.25.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.26.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.26.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.26.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.26.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.26.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.27.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.27.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.27.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.27.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.27.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.27.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.28.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.28.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.28.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.28.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.28.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.29.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.29.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.29.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.29.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.29.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.3.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.3.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.3.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.3.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.3.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.3.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.30.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.30.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.30.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.30.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.30.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.31.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.31.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.31.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.31.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.31.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.31.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.32.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.32.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.32.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.32.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.32.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.33.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.33.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.33.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.33.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.33.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.34.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.34.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.34.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.34.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.34.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.35.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.35.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.35.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.35.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.35.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.35.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.36.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.36.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.36.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.36.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.36.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.37.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.37.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.37.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.37.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.37.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.38.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.38.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.38.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.38.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.38.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.39.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.39.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.39.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.39.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.39.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.39.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.4.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.4.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.4.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.4.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.4.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.40.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.40.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.40.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.40.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.40.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.41.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.41.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.41.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.41.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.41.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.42.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.42.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.42.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.42.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.42.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.43.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.43.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.43.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.43.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.43.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.43.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.44.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.44.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.44.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.44.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.44.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.45.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.45.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.45.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.45.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.45.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.46.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.46.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.46.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.46.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.46.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.47.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.47.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.47.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.47.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.47.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.47.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.5.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.5.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.5.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.5.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.5.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.6.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.6.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.6.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.6.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.6.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.7.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.7.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.7.self_attn.k_proj": {
+         "bits": 8
+       },
+       "model.layers.7.self_attn.o_proj": {
+         "bits": 8
+       },
+       "model.layers.7.self_attn.q_proj": {
+         "bits": 8
+       },
+       "model.layers.7.self_attn.v_proj": {
+         "bits": 8
+       },
+       "model.layers.8.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.8.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.8.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.8.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.8.mlp.shared_expert_gate": {
+         "bits": 16
+       },
+       "model.layers.9.linear_attn.in_proj_ba": {
+         "bits": 8
+       },
+       "model.layers.9.linear_attn.in_proj_qkvz": {
+         "bits": 8
+       },
+       "model.layers.9.linear_attn.out_proj": {
+         "bits": 8
+       },
+       "model.layers.9.mlp.gate": {
+         "bits": 8
+       },
+       "model.layers.9.mlp.shared_expert_gate": {
+         "bits": 16
+       }
+     },
+     "group_size": 128,
+     "iters": 0,
+     "packing_format": "auto_round:auto_gptq",
+     "quant_method": "auto-round",
+     "sym": true
+   },
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 10000000,
+   "router_aux_loss_coef": 0.001,
+   "shared_expert_intermediate_size": 512,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.57.0.dev0",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "vocab_size": 151936
+ }
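The quantization_config block records an AutoRound scheme: 4-bit symmetric int weights with group size 128, packed in the auto_gptq layout, with per-layer overrides (linear-attention projections, self-attention projections, and MoE router gates at 8 bits; every mlp.shared_expert_gate at 16 bits). "iters": 0 means no tuning rounds, i.e. RTN-style quantization in AutoRound terms. A minimal loading sketch, assuming a transformers build recent enough for qwen3_next and the auto-round package installed (the path is a placeholder for this repo/folder):

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    path = "./qwen3-next-autoround"  # placeholder for this repo/folder

    # transformers dispatches on quant_method "auto-round"; the auto-round
    # runtime provides the packed 4/8-bit linear layers.
    model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16, device_map="auto")
    tok = AutoTokenizer.from_pretrained(path)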
generation_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "temperature": 0.7,
+   "top_k": 20,
+   "top_p": 0.8,
+   "transformers_version": "4.57.0.dev0"
+ }
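These defaults (sampling enabled, temperature 0.7, top-p 0.8, top-k 20, with both <|im_end|> and <|endoftext|> accepted as EOS) are picked up automatically by model.generate. A sketch continuing from the loading and chat-template examples above:

    inputs = tok(prompt, return_tensors="pt").to(model.device)

    # With no overrides, generate() reads generation_config.json:
    # do_sample=True, temperature=0.7, top_p=0.8, top_k=20.
    out = model.generate(**inputs, max_new_tokens=256)
    print(tok.decode(out[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))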
merges.txt ADDED
The diff for this file is too large to render. See raw diff
model-00001-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59c46d4fc36ff7069f30a30efa8ea7b81221c1243bf0bac11e9cdf59ee583ae9
+ size 4988686872
model-00002-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c631a6822b256b79c1362df283980b5fd0022eb206f776ce6cd26a63ef48d21
+ size 5003124872
model-00003-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5e485b4cf402766bc0c68a88104c1f384b61fe878aca518f9c88fd24eecf145
+ size 5003129664
model-00004-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f63c9f296f74a82f7e187c24103e97ad8083df01b96e70fc44a12a307c1a05fc
+ size 5003226320
model-00005-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b464bc40a58831ea085b4da0432af1391cda7a66d9346b6fac4778f2a55394e
+ size 5002935728
model-00006-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:373a661a5bc5bec77ae09095a4dbee8c3f5afd1c5a479259632256fff27e749a
+ size 5003226640
model-00007-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:015d7349e1be0b491e17e9a0993f34eec1126972620a11d25f6ce3851e128dd1
+ size 5003129904
model-00008-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9d7da56b94b73c285d0f5ffffafcf40a9077a28f1c57346cff97e8f69be96a6
+ size 5003226672
model-00009-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc0fb1207e856547ed9f313e89b6f62bd3fefe58b1d57a2aebef2cd266218e05
+ size 3134616120
model.safetensors.index.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:725d03d8f1570858c9c13b15ec3424f06086f6bf1a49a50d341ee89d80a18d56
+ size 20300105
quantization_config.json ADDED
@@ -0,0 +1,768 @@
+ {
+   "bits": 4,
+   "group_size": 128,
+   "sym": true,
+   "data_type": "int",
+   "iters": 0,
+   "autoround_version": "0.7.0",
+   "quant_method": "auto-round",
+   "packing_format": "auto_round:auto_gptq",
+   "extra_config": {
+     "model.layers.0.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.0.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.0.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.0.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.0.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.1.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.1.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.1.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.1.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.1.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.2.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.2.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.2.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.2.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.2.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.3.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.3.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.3.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.3.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.3.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.3.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.4.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.4.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.4.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.4.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.4.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.5.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.5.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.5.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.5.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.5.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.6.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.6.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.6.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.6.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.6.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.7.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.7.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.7.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.7.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.7.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.7.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.8.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.8.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.8.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.8.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.8.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.9.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.9.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.9.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.9.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.9.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.10.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.10.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.10.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.10.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.10.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.11.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.11.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.11.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.11.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.11.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.11.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.12.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.12.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.12.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.12.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.12.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.13.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.13.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.13.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.13.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.13.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.14.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.14.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.14.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.14.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.14.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.15.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.15.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.15.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.15.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.15.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.15.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.16.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.16.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.16.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.16.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.16.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.17.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.17.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.17.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.17.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.17.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.18.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.18.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.18.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.18.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.18.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.19.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.19.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.19.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.19.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.19.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.19.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.20.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.20.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.20.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.20.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.20.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.21.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.21.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.21.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.21.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.21.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.22.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.22.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.22.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.22.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.22.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.23.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.23.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.23.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.23.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.23.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.23.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.24.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.24.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.24.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.24.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.24.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.25.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.25.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.25.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.25.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.25.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.26.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.26.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.26.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.26.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.26.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.27.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.27.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.27.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.27.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.27.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.27.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.28.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.28.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.28.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.28.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.28.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.29.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.29.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.29.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.29.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.29.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.30.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.30.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.30.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.30.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.30.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.31.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.31.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.31.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.31.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.31.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.31.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.32.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.32.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.32.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.32.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.32.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.33.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.33.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.33.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.33.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.33.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.34.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.34.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.34.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.34.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.34.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.35.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.35.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.35.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.35.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.35.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.35.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.36.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.36.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.36.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.36.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.36.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.37.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.37.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.37.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.37.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.37.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.38.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.38.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.38.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.38.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.38.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.39.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.39.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.39.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.39.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.39.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.39.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.40.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.40.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.40.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.40.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.40.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.41.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.41.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.41.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.41.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.41.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.42.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.42.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.42.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.42.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.42.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.43.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.43.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.43.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.43.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.43.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.43.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.44.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.44.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.44.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.44.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.44.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.45.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.45.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.45.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.45.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.45.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.46.linear_attn.in_proj_qkvz": {
+       "bits": 8
+     },
+     "model.layers.46.linear_attn.in_proj_ba": {
+       "bits": 8
+     },
+     "model.layers.46.linear_attn.out_proj": {
+       "bits": 8
+     },
+     "model.layers.46.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.46.mlp.shared_expert_gate": {
+       "bits": 16
+     },
+     "model.layers.47.self_attn.q_proj": {
+       "bits": 8
+     },
+     "model.layers.47.self_attn.k_proj": {
+       "bits": 8
+     },
+     "model.layers.47.self_attn.v_proj": {
+       "bits": 8
+     },
+     "model.layers.47.self_attn.o_proj": {
+       "bits": 8
+     },
+     "model.layers.47.mlp.gate": {
+       "bits": 8
+     },
+     "model.layers.47.mlp.shared_expert_gate": {
+       "bits": 16
+     }
+   }
+ }
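This standalone file duplicates the quantization_config embedded in config.json, listed here in layer order. A small sketch to tally the per-layer overrides (run from inside the downloaded folder):

    import json
    from collections import Counter

    with open("quantization_config.json") as f:
        cfg = json.load(f)

    # The default is 4-bit; extra_config lists the modules kept wider.
    widths = Counter(v["bits"] for v in cfg["extra_config"].values())
    print(f"default: {cfg['bits']}-bit, group_size={cfg['group_size']}, sym={cfg['sym']}")
    print(dict(widths))  # 8-bit attention/gate overrides, 16-bit shared_expert_gate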
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,239 @@
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|object_ref_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|object_ref_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|box_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|box_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151665": {
+       "content": "<tool_response>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151666": {
+       "content": "</tool_response>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151667": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151668": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "bos_token": null,
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 1010000,
+   "pad_token": "<|endoftext|>",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff