oggydev committed on
Commit ebf2ee5
1 Parent(s): 017a866

Upload model
config.json CHANGED
@@ -1,32 +1,26 @@
  {
- "_name_or_path": "Vo1k/testmodel",
  "architectures": [
- "StableLmForCausalLM"
  ],
  "attention_dropout": 0.0,
- "bos_token_id": 100257,
- "eos_token_id": 100257,
  "hidden_act": "silu",
- "hidden_dropout": 0.0,
- "hidden_size": 2048,
  "initializer_range": 0.02,
- "intermediate_size": 5632,
- "layer_norm_eps": 1e-05,
- "max_position_embeddings": 4096,
- "model_type": "stablelm",
  "num_attention_heads": 32,
- "num_hidden_layers": 24,
- "num_key_value_heads": 32,
- "pad_token_id": 100257,
- "partial_rotary_factor": 0.25,
- "qk_layernorm": false,
- "rope_scaling": null,
- "rope_theta": 10000,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
- "transformers_version": "4.40.2",
- "use_cache": false,
- "use_parallel_residual": false,
- "use_qkv_bias": true,
- "vocab_size": 100352
  }
 
  {
+ "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
  "architectures": [
+ "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
  "hidden_act": "silu",
+ "hidden_size": 4096,
  "initializer_range": 0.02,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 32768,
+ "model_type": "mistral",
  "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
+ "transformers_version": "4.41.1",
+ "use_cache": true,
+ "vocab_size": 32000
  }
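
This commit swaps the repository's weights from a small StableLM-style model (24 layers, hidden size 2048, 100k-token vocabulary) to mistralai/Mistral-7B-Instruct-v0.2 (32 layers, hidden size 4096, grouped-query attention with 8 KV heads, 32k context, 32,000-token vocabulary). A minimal sketch for inspecting the new config after download; the repository id is not shown on this page, so `user/repo` below is a placeholder:

```python
from transformers import AutoConfig

# Placeholder repo id; substitute the repository this commit belongs to.
config = AutoConfig.from_pretrained("user/repo")

# Values taken from the new config.json above.
assert config.model_type == "mistral"
assert config.hidden_size == 4096
assert config.num_attention_heads == 32
assert config.num_key_value_heads == 8        # grouped-query attention: 4 query heads per KV head
assert config.max_position_embeddings == 32768
```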
generation_config.json CHANGED
@@ -1,7 +1,6 @@
  {
  "_from_model_config": true,
- "bos_token_id": 100257,
- "do_sample": true,
- "eos_token_id": 100257,
- "transformers_version": "4.40.2"
  }
 
  {
  "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "transformers_version": "4.41.1"
  }
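
The generation defaults follow the tokenizer change: `bos_token_id`/`eos_token_id` move from 100257 (`<|endoftext|>`) to 1/2 (`<s>`/`</s>`), and the old `"do_sample": true` entry is dropped, so generation falls back to the transformers default of greedy decoding. A short sketch, again with a placeholder repo id:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("user/repo")  # placeholder repo id
print(gen.bos_token_id, gen.eos_token_id)  # 1 2
print(gen.do_sample)                       # False: the old "do_sample": true was removed
```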
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5d9e891e9ddf087b29ad5127b8ab20beeb8e94ab60209f0f4a2b81a10ca0f24
+ size 4987196936
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95c760fff7199f7947f1ba02301242a3121e9d292f97696ec77b7d3d2c4fb9c7
+ size 4899116440
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fa28bdda4140e796e982032dfc97a1f837bd29c5f709992cd93f8011a3f5919
+ size 4999813120
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6d79ef25690bc3ae188a538eff771ee7bafffef96725c586e15661e5cc4b5a7
+ size 4999813128
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44fbad91905bdad941555683babe844a3c61fe2c26b3679a3135c1a472ab622e
+ size 4832007496
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f72a73170bdf07a35e2afd4f28ac21365fa21d0470378de9f4bc8ef2c61308a7
+ size 4249014896
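
These six entries are git-lfs pointer files, not the weights themselves: each records only the sha256 and byte size of the shard stored in LFS (about 29 GB in total, consistent with a 7B-parameter model saved in float32). A hedged sketch for verifying a downloaded shard against its pointer; `parse_lfs_pointer` and `verify_shard` are illustrative helpers, not part of any library:

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    """Split a git-lfs pointer file ("version", "oid", "size" lines) into fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def verify_shard(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded shard's size and sha256 against its LFS pointer."""
    expected = parse_lfs_pointer(Path(pointer_path).read_text())
    blob = Path(blob_path)
    if blob.stat().st_size != expected["size"]:
        return False
    digest = hashlib.sha256()
    with blob.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected["oid"]
```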
model.safetensors.index.json CHANGED
@@ -1,347 +1,298 @@
  {
  "metadata": {
- "total_size": 6578061312
  },
  "weight_map": {
- "lm_head.weight": "model-00004-of-00004.safetensors",
- "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.input_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.post_attention_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.input_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.post_attention_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.10.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.15.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.15.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.16.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.2.input_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.post_attention_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.20.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.input_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.post_attention_layernorm.bias": "model-00003-of-00004.safetensors",
- "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
- "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
- "model.layers.3.input_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.post_attention_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.input_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.post_attention_layernorm.bias": "model-00001-of-00004.safetensors",
- "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.5.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.5.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
- "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
- "model.layers.6.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.input_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.post_attention_layernorm.bias": "model-00002-of-00004.safetensors",
- "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
- "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
- "model.norm.bias": "model-00003-of-00004.safetensors",
- "model.norm.weight": "model-00003-of-00004.safetensors"
  }
  }
 
  {
  "metadata": {
+ "total_size": 28966928384
  },
  "weight_map": {
+ "lm_head.weight": "model-00006-of-00006.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.norm.weight": "model-00006-of-00006.safetensors"
  }
  }
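
The index is rewritten for the new six-shard layout: `weight_map` names the shard holding each tensor, and `total_size` (28,966,928,384 bytes) counts tensor data only, which is why it sits slightly below the sum of the shard file sizes above; each .safetensors file also carries a JSON header describing its tensors. A small sketch that sanity-checks a local copy of the index against the downloaded shards:

```python
import json
from pathlib import Path

index = json.loads(Path("model.safetensors.index.json").read_text())

# Every tensor should map onto one of the six shards uploaded in this commit.
shards = sorted(set(index["weight_map"].values()))
missing = [s for s in shards if not Path(s).exists()]
print(f"{len(index['weight_map'])} tensors across {len(shards)} shards; missing files: {missing}")

print("declared tensor bytes:", index["metadata"]["total_size"])
print("shard file bytes:", sum(Path(s).stat().st_size for s in shards if Path(s).exists()))
```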
special_tokens_map.json CHANGED
@@ -1,62 +1,20 @@
  {
- "additional_special_tokens": [
- "<|reg_extra|>",
- "<|endoftext|>",
- "<|fim_prefix|>",
- "<|fim_middle|>",
- "<|fim_suffix|>",
- "<|fim_pad|>",
- "<gh_stars>",
- "<filename>",
- "<issue_start>",
- "<issue_comment>",
- "<issue_closed>",
- "<jupyter_start>",
- "<jupyter_text>",
- "<jupyter_code>",
- "<jupyter_output>",
- "<empty_output>",
- "<commit_before>",
- "<commit_msg>",
- "<commit_after>",
- "<reponame>",
- "<|endofprompt|>",
- "<|im_start|>",
- "<|im_end|>",
- "<|pause|>",
- "<|reg0|>",
- "<|reg1|>",
- "<|reg2|>",
- "<|reg3|>",
- "<|reg4|>",
- "<|reg5|>",
- "<|reg6|>",
- "<|reg7|>",
- "<|extra0|>"
- ],
  "bos_token": {
- "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
  "eos_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
  "unk_token": {
- "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
 
1
  {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  "bos_token": {
3
+ "content": "<s>",
4
  "lstrip": false,
5
  "normalized": false,
6
  "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
+ "content": "</s>",
 
 
 
 
 
 
 
11
  "lstrip": false,
12
  "normalized": false,
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
  "unk_token": {
17
+ "content": "<unk>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
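
As with the shard files, what is committed here is a Git LFS pointer rather than the ~493 KB SentencePiece model itself; the real bytes arrive on LFS checkout and can be verified against the recorded oid. A small stdlib-only sketch of that verification, assuming tokenizer.model has been fully downloaded:

import hashlib
import os

# Expected values taken from the LFS pointer above.
expected_oid = "dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055"
expected_size = 493443

# Stream the file through sha256 in 1 MiB chunks.
h = hashlib.sha256()
with open("tokenizer.model", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print("size ok:  ", os.path.getsize("tokenizer.model") == expected_size)
print("sha256 ok:", h.hexdigest() == expected_oid)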
tokenizer_config.json CHANGED
@@ -1,264 +1,25 @@
 {
- "add_prefix_space": false,
 "added_tokens_decoder": {
- "100256": {
- "content": "<|reg_extra|>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
 "single_word": false,
 "special": true
 },
- "100257": {
- "content": "<|endoftext|>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
 "single_word": false,
 "special": true
 },
- "100258": {
- "content": "<|fim_prefix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100259": {
- "content": "<|fim_middle|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100260": {
- "content": "<|fim_suffix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100261": {
- "content": "<|fim_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100262": {
- "content": "<gh_stars>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100263": {
- "content": "<filename>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100264": {
- "content": "<issue_start>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100265": {
- "content": "<issue_comment>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100266": {
- "content": "<issue_closed>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100267": {
- "content": "<jupyter_start>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100268": {
- "content": "<jupyter_text>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100269": {
- "content": "<jupyter_code>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100270": {
- "content": "<jupyter_output>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100271": {
- "content": "<empty_output>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100272": {
- "content": "<commit_before>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100273": {
- "content": "<commit_msg>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100274": {
- "content": "<commit_after>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100275": {
- "content": "<reponame>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100276": {
- "content": "<|endofprompt|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100277": {
- "content": "<|im_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100278": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100279": {
- "content": "<|pause|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100280": {
- "content": "<|reg0|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100281": {
- "content": "<|reg1|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100282": {
- "content": "<|reg2|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100283": {
- "content": "<|reg3|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100284": {
- "content": "<|reg4|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100285": {
- "content": "<|reg5|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100286": {
- "content": "<|reg6|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100287": {
- "content": "<|reg7|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "100288": {
- "content": "<|extra0|>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
@@ -266,46 +27,17 @@
 "special": true
 }
 },
- "additional_special_tokens": [
- "<|reg_extra|>",
- "<|endoftext|>",
- "<|fim_prefix|>",
- "<|fim_middle|>",
- "<|fim_suffix|>",
- "<|fim_pad|>",
- "<gh_stars>",
- "<filename>",
- "<issue_start>",
- "<issue_comment>",
- "<issue_closed>",
- "<jupyter_start>",
- "<jupyter_text>",
- "<jupyter_code>",
- "<jupyter_output>",
- "<empty_output>",
- "<commit_before>",
- "<commit_msg>",
- "<commit_after>",
- "<reponame>",
- "<|endofprompt|>",
- "<|im_start|>",
- "<|im_end|>",
- "<|pause|>",
- "<|reg0|>",
- "<|reg1|>",
- "<|reg2|>",
- "<|reg3|>",
- "<|reg4|>",
- "<|reg5|>",
- "<|reg6|>",
- "<|reg7|>",
- "<|extra0|>"
- ],
- "bos_token": "<|endoftext|>",
- "clean_up_tokenization_spaces": true,
- "eos_token": "<|endoftext|>",
 "model_max_length": 1000000000000000019884624838656,
- "pad_token": "<|endoftext|>",
- "tokenizer_class": "GPT2Tokenizer",
- "unk_token": "<|endoftext|>"
 }
 
 {
+ "add_bos_token": true,
+ "add_eos_token": false,
 "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
 "single_word": false,
 "special": true
 },
+ "1": {
+ "content": "<s>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
 "single_word": false,
 "special": true
 },
+ "2": {
+ "content": "</s>",
 "lstrip": false,
 "normalized": false,
 "rstrip": false,
 "single_word": false,
 "special": true
 }
 },
+ "additional_special_tokens": [],
+ "bos_token": "<s>",
+ "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": true,
 "model_max_length": 1000000000000000019884624838656,
+ "pad_token": null,
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
 }
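
The replacement config switches the tokenizer class to LlamaTokenizer with SentencePiece options and ships Mistral's [INST]-style chat_template, which prepends the BOS token, requires strictly alternating user/assistant turns, and appends </s> after every assistant message. A short usage sketch, again assuming a local checkout of this repo and the transformers library installed:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")

messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi there."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# Render the conversation through the chat_template above;
# tokenize=False returns the prompt string instead of input ids.
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)
# expected: <s>[INST] Hello! [/INST]Hi there.</s>[INST] What is 2 + 2? [/INST]

# A system turn, or two consecutive turns from the same role, raises the
# template's "Conversation roles must alternate ..." exception.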