imfinethx committed on
Commit
2e144f7
1 Parent(s): 717f4f5

for cnn_dm

Files changed (46)
  1. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/config.json +218 -0
  2. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  3. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +30 -0
  4. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  5. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer.model +3 -0
  6. anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +43 -0
  7. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/added_tokens.json +40 -0
  8. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/config.json +193 -0
  9. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/configuration_phi.py +193 -0
  10. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/merges.txt +0 -0
  11. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  12. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +23 -0
  13. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  14. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +323 -0
  15. anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/vocab.json +0 -0
  16. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/added_tokens.json +40 -0
  17. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/config.json +193 -0
  18. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/configuration_phi.py +193 -0
  19. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/merges.txt +0 -0
  20. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  21. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +23 -0
  22. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  23. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +323 -0
  24. anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/vocab.json +0 -0
  25. anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/config.json +273 -0
  26. anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  27. anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +30 -0
  28. anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  29. anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +213 -0
  30. anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/config.json +273 -0
  31. anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  32. anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +30 -0
  33. anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  34. anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +213 -0
  35. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/config.json +273 -0
  36. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  37. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +30 -0
  38. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  39. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer.model +3 -0
  40. anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +43 -0
  41. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/config.json +273 -0
  42. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin +3 -0
  43. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json +30 -0
  44. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer.json +0 -0
  45. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer.model +3 -0
  46. anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json +43 -0
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,218 @@
+ {
+   "_name_or_path": "checkpoints/mtgv/MobileLLaMA-1.4B-Chat",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.o_proj",
+         "mlp.gate_proj",
+         "mlp.up_proj",
+         "mlp.down_proj"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.down_proj": 191326,
+       "model.layers.0.mlp.gate_proj": 189994,
+       "model.layers.0.mlp.up_proj": 180517,
+       "model.layers.0.self_attn.k_proj": 176786,
+       "model.layers.0.self_attn.o_proj": 72994,
+       "model.layers.0.self_attn.q_proj": 158594,
+       "model.layers.0.self_attn.v_proj": 81335,
+       "model.layers.1.mlp.down_proj": 185829,
+       "model.layers.1.mlp.gate_proj": 191585,
+       "model.layers.1.mlp.up_proj": 180912,
+       "model.layers.1.self_attn.k_proj": 355737,
+       "model.layers.1.self_attn.o_proj": 125591,
+       "model.layers.1.self_attn.q_proj": 351572,
+       "model.layers.1.self_attn.v_proj": 92291,
+       "model.layers.10.mlp.down_proj": 185125,
+       "model.layers.10.mlp.gate_proj": 212731,
+       "model.layers.10.mlp.up_proj": 191684,
+       "model.layers.10.self_attn.k_proj": 192285,
+       "model.layers.10.self_attn.o_proj": 85016,
+       "model.layers.10.self_attn.q_proj": 176785,
+       "model.layers.10.self_attn.v_proj": 88862,
+       "model.layers.11.mlp.down_proj": 187377,
+       "model.layers.11.mlp.gate_proj": 214387,
+       "model.layers.11.mlp.up_proj": 193449,
+       "model.layers.11.self_attn.k_proj": 183041,
+       "model.layers.11.self_attn.o_proj": 79355,
+       "model.layers.11.self_attn.q_proj": 179551,
+       "model.layers.11.self_attn.v_proj": 85991,
+       "model.layers.12.mlp.down_proj": 193165,
+       "model.layers.12.mlp.gate_proj": 228979,
+       "model.layers.12.mlp.up_proj": 202530,
+       "model.layers.12.self_attn.k_proj": 168033,
+       "model.layers.12.self_attn.o_proj": 74731,
+       "model.layers.12.self_attn.q_proj": 159784,
+       "model.layers.12.self_attn.v_proj": 85258,
+       "model.layers.13.mlp.down_proj": 199578,
+       "model.layers.13.mlp.gate_proj": 254937,
+       "model.layers.13.mlp.up_proj": 205412,
+       "model.layers.13.self_attn.k_proj": 168574,
+       "model.layers.13.self_attn.o_proj": 80155,
+       "model.layers.13.self_attn.q_proj": 166589,
+       "model.layers.13.self_attn.v_proj": 96907,
+       "model.layers.14.mlp.down_proj": 206123,
+       "model.layers.14.mlp.gate_proj": 267261,
+       "model.layers.14.mlp.up_proj": 213029,
+       "model.layers.14.self_attn.k_proj": 175625,
+       "model.layers.14.self_attn.o_proj": 79709,
+       "model.layers.14.self_attn.q_proj": 165058,
+       "model.layers.14.self_attn.v_proj": 92779,
+       "model.layers.15.mlp.down_proj": 202519,
+       "model.layers.15.mlp.gate_proj": 259676,
+       "model.layers.15.mlp.up_proj": 213468,
+       "model.layers.15.self_attn.k_proj": 178037,
+       "model.layers.15.self_attn.o_proj": 82764,
+       "model.layers.15.self_attn.q_proj": 178982,
+       "model.layers.15.self_attn.v_proj": 95999,
+       "model.layers.16.mlp.down_proj": 201854,
+       "model.layers.16.mlp.gate_proj": 250550,
+       "model.layers.16.mlp.up_proj": 213411,
+       "model.layers.16.self_attn.k_proj": 160056,
+       "model.layers.16.self_attn.o_proj": 83224,
+       "model.layers.16.self_attn.q_proj": 157716,
+       "model.layers.16.self_attn.v_proj": 92808,
+       "model.layers.17.mlp.down_proj": 198730,
+       "model.layers.17.mlp.gate_proj": 237767,
+       "model.layers.17.mlp.up_proj": 209909,
+       "model.layers.17.self_attn.k_proj": 171644,
+       "model.layers.17.self_attn.o_proj": 83754,
+       "model.layers.17.self_attn.q_proj": 172707,
+       "model.layers.17.self_attn.v_proj": 90734,
+       "model.layers.18.mlp.down_proj": 192702,
+       "model.layers.18.mlp.gate_proj": 222867,
+       "model.layers.18.mlp.up_proj": 206730,
+       "model.layers.18.self_attn.k_proj": 156690,
+       "model.layers.18.self_attn.o_proj": 84609,
+       "model.layers.18.self_attn.q_proj": 174408,
+       "model.layers.18.self_attn.v_proj": 89341,
+       "model.layers.19.mlp.down_proj": 191205,
+       "model.layers.19.mlp.gate_proj": 213948,
+       "model.layers.19.mlp.up_proj": 201900,
+       "model.layers.19.self_attn.k_proj": 155221,
+       "model.layers.19.self_attn.o_proj": 100111,
+       "model.layers.19.self_attn.q_proj": 154902,
+       "model.layers.19.self_attn.v_proj": 102790,
+       "model.layers.2.mlp.down_proj": 180930,
+       "model.layers.2.mlp.gate_proj": 178939,
+       "model.layers.2.mlp.up_proj": 178176,
+       "model.layers.2.self_attn.k_proj": 254302,
+       "model.layers.2.self_attn.o_proj": 85627,
+       "model.layers.2.self_attn.q_proj": 208855,
+       "model.layers.2.self_attn.v_proj": 78918,
+       "model.layers.20.mlp.down_proj": 189531,
+       "model.layers.20.mlp.gate_proj": 208030,
+       "model.layers.20.mlp.up_proj": 200150,
+       "model.layers.20.self_attn.k_proj": 136537,
+       "model.layers.20.self_attn.o_proj": 91412,
+       "model.layers.20.self_attn.q_proj": 136465,
+       "model.layers.20.self_attn.v_proj": 87525,
+       "model.layers.21.mlp.down_proj": 189005,
+       "model.layers.21.mlp.gate_proj": 197507,
+       "model.layers.21.mlp.up_proj": 192699,
+       "model.layers.21.self_attn.k_proj": 140343,
+       "model.layers.21.self_attn.o_proj": 117585,
+       "model.layers.21.self_attn.q_proj": 134785,
+       "model.layers.21.self_attn.v_proj": 111843,
+       "model.layers.22.mlp.down_proj": 204005,
+       "model.layers.22.mlp.gate_proj": 210880,
+       "model.layers.22.mlp.up_proj": 200959,
+       "model.layers.22.self_attn.k_proj": 150879,
+       "model.layers.22.self_attn.o_proj": 101330,
+       "model.layers.22.self_attn.q_proj": 151577,
+       "model.layers.22.self_attn.v_proj": 98921,
+       "model.layers.23.mlp.down_proj": 277332,
+       "model.layers.23.mlp.gate_proj": 245277,
+       "model.layers.23.mlp.up_proj": 258698,
+       "model.layers.23.self_attn.k_proj": 118274,
+       "model.layers.23.self_attn.o_proj": 114643,
+       "model.layers.23.self_attn.q_proj": 113038,
+       "model.layers.23.self_attn.v_proj": 107870,
+       "model.layers.3.mlp.down_proj": 178845,
+       "model.layers.3.mlp.gate_proj": 177565,
+       "model.layers.3.mlp.up_proj": 178331,
+       "model.layers.3.self_attn.k_proj": 187462,
+       "model.layers.3.self_attn.o_proj": 70922,
+       "model.layers.3.self_attn.q_proj": 152011,
+       "model.layers.3.self_attn.v_proj": 72948,
+       "model.layers.4.mlp.down_proj": 183192,
+       "model.layers.4.mlp.gate_proj": 181505,
+       "model.layers.4.mlp.up_proj": 180662,
+       "model.layers.4.self_attn.k_proj": 186191,
+       "model.layers.4.self_attn.o_proj": 68503,
+       "model.layers.4.self_attn.q_proj": 143279,
+       "model.layers.4.self_attn.v_proj": 71540,
+       "model.layers.5.mlp.down_proj": 182326,
+       "model.layers.5.mlp.gate_proj": 180629,
+       "model.layers.5.mlp.up_proj": 180722,
+       "model.layers.5.self_attn.k_proj": 179644,
+       "model.layers.5.self_attn.o_proj": 78534,
+       "model.layers.5.self_attn.q_proj": 155296,
+       "model.layers.5.self_attn.v_proj": 84819,
+       "model.layers.6.mlp.down_proj": 179180,
+       "model.layers.6.mlp.gate_proj": 187118,
+       "model.layers.6.mlp.up_proj": 179770,
+       "model.layers.6.self_attn.k_proj": 181747,
+       "model.layers.6.self_attn.o_proj": 86711,
+       "model.layers.6.self_attn.q_proj": 156780,
+       "model.layers.6.self_attn.v_proj": 97342,
+       "model.layers.7.mlp.down_proj": 179455,
+       "model.layers.7.mlp.gate_proj": 190350,
+       "model.layers.7.mlp.up_proj": 185410,
+       "model.layers.7.self_attn.k_proj": 152715,
+       "model.layers.7.self_attn.o_proj": 75703,
+       "model.layers.7.self_attn.q_proj": 130349,
+       "model.layers.7.self_attn.v_proj": 81380,
+       "model.layers.8.mlp.down_proj": 184587,
+       "model.layers.8.mlp.gate_proj": 206660,
+       "model.layers.8.mlp.up_proj": 192351,
+       "model.layers.8.self_attn.k_proj": 167562,
+       "model.layers.8.self_attn.o_proj": 90781,
+       "model.layers.8.self_attn.q_proj": 142073,
+       "model.layers.8.self_attn.v_proj": 103015,
+       "model.layers.9.mlp.down_proj": 183099,
+       "model.layers.9.mlp.gate_proj": 202416,
+       "model.layers.9.mlp.up_proj": 192308,
+       "model.layers.9.self_attn.k_proj": 184713,
+       "model.layers.9.self_attn.o_proj": 92257,
+       "model.layers.9.self_attn.q_proj": 170487,
+       "model.layers.9.self_attn.v_proj": 101971
+     }
+   },
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 5632,
+   "max_position_embeddings": 2048,
+   "max_sequence_length": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "num_key_value_heads": 16,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
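The `anyprec` block above is machine-written quantization metadata: a 2-bit seed precision, a 4-bit parent precision, and a per-module count of values kept sparse. As a minimal sketch of how to inspect it (the local directory path is illustrative, nothing beyond the standard library is assumed):

```python
import json
from collections import defaultdict

# Illustrative local path; point this at wherever the checkpoint was downloaded.
ckpt = "anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512"

with open(f"{ckpt}/config.json") as f:
    cfg = json.load(f)

anyprec = cfg["anyprec"]
print(f"precision range: {anyprec['seed_precision']}-{anyprec['parent_precision']} bits")

# Aggregate the sparse value counts per decoder layer.
per_layer = defaultdict(int)
for name, count in anyprec["sparse_numvals"].items():
    layer = int(name.split(".")[2])  # names look like "model.layers.<i>.<module>"
    per_layer[layer] += count

for layer in sorted(per_layer):
    print(f"layer {layer:2d}: {per_layer[layer]:,} sparse values")
```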
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a219460ca306503740e986be225a1cd0716a761ecfb8ee554514cbf59f98f1b
+ size 1072670169
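`pytorch_model.bin` is stored through Git LFS: the three lines above are the entire file as committed, while the actual weights (about 1.07 GB here) live in LFS storage addressed by the sha256 oid. A small sketch of reading such a pointer, assuming the pointer file is available locally:

```python
def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

# Expected result for the file above:
# {'version': 'https://git-lfs.github.com/spec/v1',
#  'oid': 'sha256:6a219460...', 'size': '1072670169'}
pointer = parse_lfs_pointer("pytorch_model.bin")
print(f"{int(pointer['size']) / 1e9:.2f} GB")
```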
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 2048,
+   "pad_token": "<unk>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
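The tokenizer files above are a standard `LlamaTokenizer` bundle, so they load with the usual transformers entry point. A sketch, assuming the checkpoint directory has been downloaded locally under the name shown:

```python
from transformers import AutoTokenizer

# Illustrative local path to the checkpoint directory above.
tok = AutoTokenizer.from_pretrained(
    "anyprec-MobileLLaMA-1.4B-Chat-dns-3.35-2.19-w4_orig2-gc1-c4_s100_blk512"
)
ids = tok("Hello, world!").input_ids
print(ids)             # starts with bos id 1, per "add_bos_token": true
print(tok.decode(ids))
```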
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   "  ": 50286,
+   "   ": 50285,
+   "    ": 50284,
+   "     ": 50283,
+   "      ": 50282,
+   "       ": 50281,
+   "        ": 50280,
+   "         ": 50279,
+   "          ": 50278,
+   "           ": 50277,
+   "            ": 50276,
+   "             ": 50275,
+   "              ": 50274,
+   "               ": 50273,
+   "                ": 50272,
+   "                 ": 50271,
+   "                  ": 50270,
+   "                   ": 50269,
+   "                    ": 50268,
+   "                     ": 50267,
+   "                      ": 50266,
+   "                       ": 50265,
+   "                        ": 50264,
+   "                         ": 50263,
+   "                          ": 50262,
+   "                           ": 50261,
+   "                            ": 50260,
+   "                             ": 50259,
+   "                              ": 50258,
+   "                               ": 50257
+ }
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,193 @@
+ {
+   "_name_or_path": "checkpoints/microsoft/phi-1_5",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.dense",
+         "mlp.fc1",
+         "mlp.fc2"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.fc1": 675311,
+       "model.layers.0.mlp.fc2": 624542,
+       "model.layers.0.self_attn.dense": 82454,
+       "model.layers.0.self_attn.k_proj": 180985,
+       "model.layers.0.self_attn.q_proj": 156889,
+       "model.layers.0.self_attn.v_proj": 94332,
+       "model.layers.1.mlp.fc1": 201243,
+       "model.layers.1.mlp.fc2": 325345,
+       "model.layers.1.self_attn.dense": 73540,
+       "model.layers.1.self_attn.k_proj": 103462,
+       "model.layers.1.self_attn.q_proj": 99058,
+       "model.layers.1.self_attn.v_proj": 86333,
+       "model.layers.10.mlp.fc1": 295445,
+       "model.layers.10.mlp.fc2": 323451,
+       "model.layers.10.self_attn.dense": 72926,
+       "model.layers.10.self_attn.k_proj": 100535,
+       "model.layers.10.self_attn.q_proj": 94643,
+       "model.layers.10.self_attn.v_proj": 85469,
+       "model.layers.11.mlp.fc1": 291450,
+       "model.layers.11.mlp.fc2": 321914,
+       "model.layers.11.self_attn.dense": 71540,
+       "model.layers.11.self_attn.k_proj": 97013,
+       "model.layers.11.self_attn.q_proj": 89427,
+       "model.layers.11.self_attn.v_proj": 82468,
+       "model.layers.12.mlp.fc1": 285509,
+       "model.layers.12.mlp.fc2": 328599,
+       "model.layers.12.self_attn.dense": 69830,
+       "model.layers.12.self_attn.k_proj": 101851,
+       "model.layers.12.self_attn.q_proj": 94202,
+       "model.layers.12.self_attn.v_proj": 84071,
+       "model.layers.13.mlp.fc1": 277413,
+       "model.layers.13.mlp.fc2": 308466,
+       "model.layers.13.self_attn.dense": 74257,
+       "model.layers.13.self_attn.k_proj": 101329,
+       "model.layers.13.self_attn.q_proj": 94394,
+       "model.layers.13.self_attn.v_proj": 83090,
+       "model.layers.14.mlp.fc1": 272080,
+       "model.layers.14.mlp.fc2": 347434,
+       "model.layers.14.self_attn.dense": 77486,
+       "model.layers.14.self_attn.k_proj": 99568,
+       "model.layers.14.self_attn.q_proj": 97367,
+       "model.layers.14.self_attn.v_proj": 85949,
+       "model.layers.15.mlp.fc1": 262687,
+       "model.layers.15.mlp.fc2": 326298,
+       "model.layers.15.self_attn.dense": 71091,
+       "model.layers.15.self_attn.k_proj": 99666,
+       "model.layers.15.self_attn.q_proj": 114009,
+       "model.layers.15.self_attn.v_proj": 79472,
+       "model.layers.16.mlp.fc1": 255098,
+       "model.layers.16.mlp.fc2": 365511,
+       "model.layers.16.self_attn.dense": 69991,
+       "model.layers.16.self_attn.k_proj": 97043,
+       "model.layers.16.self_attn.q_proj": 101120,
+       "model.layers.16.self_attn.v_proj": 75770,
+       "model.layers.17.mlp.fc1": 245724,
+       "model.layers.17.mlp.fc2": 337114,
+       "model.layers.17.self_attn.dense": 70591,
+       "model.layers.17.self_attn.k_proj": 92819,
+       "model.layers.17.self_attn.q_proj": 91892,
+       "model.layers.17.self_attn.v_proj": 74253,
+       "model.layers.18.mlp.fc1": 240816,
+       "model.layers.18.mlp.fc2": 335322,
+       "model.layers.18.self_attn.dense": 79137,
+       "model.layers.18.self_attn.k_proj": 98409,
+       "model.layers.18.self_attn.q_proj": 123879,
+       "model.layers.18.self_attn.v_proj": 85293,
+       "model.layers.19.mlp.fc1": 234256,
+       "model.layers.19.mlp.fc2": 317669,
+       "model.layers.19.self_attn.dense": 80277,
+       "model.layers.19.self_attn.k_proj": 97895,
+       "model.layers.19.self_attn.q_proj": 121723,
+       "model.layers.19.self_attn.v_proj": 82971,
+       "model.layers.2.mlp.fc1": 233518,
+       "model.layers.2.mlp.fc2": 315355,
+       "model.layers.2.self_attn.dense": 68242,
+       "model.layers.2.self_attn.k_proj": 102966,
+       "model.layers.2.self_attn.q_proj": 98281,
+       "model.layers.2.self_attn.v_proj": 83855,
+       "model.layers.20.mlp.fc1": 230157,
+       "model.layers.20.mlp.fc2": 317412,
+       "model.layers.20.self_attn.dense": 70557,
+       "model.layers.20.self_attn.k_proj": 96874,
+       "model.layers.20.self_attn.q_proj": 117460,
+       "model.layers.20.self_attn.v_proj": 75849,
+       "model.layers.21.mlp.fc1": 227363,
+       "model.layers.21.mlp.fc2": 323600,
+       "model.layers.21.self_attn.dense": 73035,
+       "model.layers.21.self_attn.k_proj": 93176,
+       "model.layers.21.self_attn.q_proj": 124248,
+       "model.layers.21.self_attn.v_proj": 75505,
+       "model.layers.22.mlp.fc1": 233020,
+       "model.layers.22.mlp.fc2": 395456,
+       "model.layers.22.self_attn.dense": 71502,
+       "model.layers.22.self_attn.k_proj": 88462,
+       "model.layers.22.self_attn.q_proj": 162865,
+       "model.layers.22.self_attn.v_proj": 73909,
+       "model.layers.23.mlp.fc1": 285355,
+       "model.layers.23.mlp.fc2": 631745,
+       "model.layers.23.self_attn.dense": 101963,
+       "model.layers.23.self_attn.k_proj": 107304,
+       "model.layers.23.self_attn.q_proj": 260586,
+       "model.layers.23.self_attn.v_proj": 107005,
+       "model.layers.3.mlp.fc1": 269841,
+       "model.layers.3.mlp.fc2": 330081,
+       "model.layers.3.self_attn.dense": 76738,
+       "model.layers.3.self_attn.k_proj": 114282,
+       "model.layers.3.self_attn.q_proj": 110068,
+       "model.layers.3.self_attn.v_proj": 96670,
+       "model.layers.4.mlp.fc1": 305604,
+       "model.layers.4.mlp.fc2": 333478,
+       "model.layers.4.self_attn.dense": 73404,
+       "model.layers.4.self_attn.k_proj": 105649,
+       "model.layers.4.self_attn.q_proj": 102666,
+       "model.layers.4.self_attn.v_proj": 92391,
+       "model.layers.5.mlp.fc1": 293406,
+       "model.layers.5.mlp.fc2": 337582,
+       "model.layers.5.self_attn.dense": 71678,
+       "model.layers.5.self_attn.k_proj": 120017,
+       "model.layers.5.self_attn.q_proj": 121205,
+       "model.layers.5.self_attn.v_proj": 92099,
+       "model.layers.6.mlp.fc1": 291972,
+       "model.layers.6.mlp.fc2": 329924,
+       "model.layers.6.self_attn.dense": 81259,
+       "model.layers.6.self_attn.k_proj": 104051,
+       "model.layers.6.self_attn.q_proj": 100833,
+       "model.layers.6.self_attn.v_proj": 93397,
+       "model.layers.7.mlp.fc1": 293548,
+       "model.layers.7.mlp.fc2": 331966,
+       "model.layers.7.self_attn.dense": 68519,
+       "model.layers.7.self_attn.k_proj": 108909,
+       "model.layers.7.self_attn.q_proj": 103642,
+       "model.layers.7.self_attn.v_proj": 84278,
+       "model.layers.8.mlp.fc1": 304480,
+       "model.layers.8.mlp.fc2": 318568,
+       "model.layers.8.self_attn.dense": 76294,
+       "model.layers.8.self_attn.k_proj": 110748,
+       "model.layers.8.self_attn.q_proj": 103303,
+       "model.layers.8.self_attn.v_proj": 91497,
+       "model.layers.9.mlp.fc1": 298086,
+       "model.layers.9.mlp.fc2": 319091,
+       "model.layers.9.self_attn.dense": 68561,
+       "model.layers.9.self_attn.k_proj": 109187,
+       "model.layers.9.self_attn.q_proj": 103326,
+       "model.layers.9.self_attn.v_proj": 83167
+     }
+   },
+   "architectures": [
+     "PhiForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": null,
+   "embd_pdrop": 0.0,
+   "eos_token_id": null,
+   "hidden_act": "gelu_new",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "phi",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 24,
+   "num_key_value_heads": 32,
+   "partial_rotary_factor": 0.5,
+   "qk_layernorm": false,
+   "resid_pdrop": 0.0,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "vocab_size": 51200
+ }
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/configuration_phi.py ADDED
@@ -0,0 +1,193 @@
+ # coding=utf-8
+ # Copyright 2023 Microsoft and the HuggingFace Inc. team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """ Phi model configuration"""
+
+
+ from transformers.configuration_utils import PretrainedConfig
+ from transformers.utils import logging
+
+
+ logger = logging.get_logger(__name__)
+
+ PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
+     "microsoft/phi-1_5": "https://huggingface.co/microsoft/phi-1_5/resolve/main/config.json",
+ }
+
+
+ class PhiConfig(PretrainedConfig):
+     r"""
+     This is the configuration class to store the configuration of a [`PhiModel`]. It is used to instantiate a Phi
+     model according to the specified arguments, defining the model architecture. Instantiating a configuration with
+     the defaults will yield a configuration similar to that of the Phi
+     [microsoft/phi-1](https://huggingface.co/microsoft/phi-1).
+
+     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+     documentation from [`PretrainedConfig`] for more information.
+
+     Args:
+         vocab_size (`int`, *optional*, defaults to 51200):
+             Vocabulary size of the Phi model. Defines the number of different tokens that can be represented by the
+             `inputs_ids` passed when calling [`PhiModel`].
+         hidden_size (`int`, *optional*, defaults to 2048):
+             Dimension of the hidden representations.
+         intermediate_size (`int`, *optional*, defaults to 8192):
+             Dimension of the MLP representations.
+         num_hidden_layers (`int`, *optional*, defaults to 24):
+             Number of hidden layers in the Transformer decoder.
+         num_attention_heads (`int`, *optional*, defaults to 32):
+             Number of attention heads for each attention layer in the Transformer decoder.
+         num_key_value_heads (`int`, *optional*):
+             This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+             `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA); if
+             `num_key_value_heads=1`, the model will use Multi Query Attention (MQA); otherwise GQA is used. When
+             converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+             by meanpooling all the original heads within that group. For more details, check out [this
+             paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
+             `num_attention_heads`.
+         resid_pdrop (`float`, *optional*, defaults to 0.0):
+             Dropout probability for mlp outputs.
+         embd_pdrop (`int`, *optional*, defaults to 0.0):
+             The dropout ratio for the embeddings.
+         attention_dropout (`float`, *optional*, defaults to 0.0):
+             The dropout ratio after computing the attention scores.
+         hidden_act (`str` or `function`, *optional*, defaults to `"gelu_new"`):
+             The non-linear activation function (function or string) in the decoder.
+         max_position_embeddings (`int`, *optional*, defaults to 2048):
+             The maximum sequence length that this model might ever be used with. Phi-1 and Phi-1.5 support up to 2048
+             tokens.
+         initializer_range (`float`, *optional*, defaults to 0.02):
+             The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
+         layer_norm_eps (`float`, *optional*, defaults to 1e-05):
+             The epsilon used by the layer normalization layers.
+         use_cache (`bool`, *optional*, defaults to `True`):
+             Whether or not the model should return the last key/values attentions (not used by all models). Only
+             relevant if `config.is_decoder=True`.
+         tie_word_embeddings (`bool`, *optional*, defaults to `False`):
+             Whether to tie weight embeddings.
+         rope_theta (`float`, *optional*, defaults to 10000.0):
+             The base period of the RoPE embeddings.
+         rope_scaling (`Dict`, *optional*):
+             Dictionary containing the scaling configuration for the RoPE embeddings. Currently supports two scaling
+             strategies: linear and dynamic. Their scaling factor must be a float greater than 1. The expected format
+             is `{"type": strategy name, "factor": scaling factor}`. When using this flag, don't update
+             `max_position_embeddings` to the expected new maximum. See the following thread for more information on how
+             these scaling strategies behave:
+             https://www.reddit.com/r/LocalPersimmon/comments/14mrgpr/dynamically_scaled_rope_further_increases/. This
+             is an experimental feature, subject to breaking API changes in future versions.
+         partial_rotary_factor (`float`, *optional*, defaults to 0.5):
+             Percentage of the query and keys which will have rotary embedding.
+         qk_layernorm (`bool`, *optional*, defaults to `False`):
+             Whether or not to normalize the Queries and Keys after projecting the hidden states.
+         bos_token_id (`int`, *optional*, defaults to 1):
+             Denotes the beginning-of-sequence token id.
+         eos_token_id (`int`, *optional*, defaults to 2):
+             Denotes the end-of-sequence token id.
+
+     Example:
+
+     ```python
+     >>> from transformers import PhiModel, PhiConfig
+
+     >>> # Initializing a Phi-1 style configuration
+     >>> configuration = PhiConfig.from_pretrained("microsoft/phi-1")
+
+     >>> # Initializing a model from the configuration
+     >>> model = PhiModel(configuration)
+
+     >>> # Accessing the model configuration
+     >>> configuration = model.config
+     ```"""
+
+     model_type = "phi"
+     keys_to_ignore_at_inference = ["past_key_values"]
+
+     def __init__(
+         self,
+         vocab_size=51200,
+         hidden_size=2048,
+         intermediate_size=8192,
+         num_hidden_layers=24,
+         num_attention_heads=32,
+         num_key_value_heads=None,
+         resid_pdrop=0.0,
+         embd_pdrop=0.0,
+         attention_dropout=0.0,
+         hidden_act="gelu_new",
+         max_position_embeddings=2048,
+         initializer_range=0.02,
+         layer_norm_eps=1e-5,
+         use_cache=True,
+         tie_word_embeddings=False,
+         rope_theta=10000.0,
+         rope_scaling=None,
+         partial_rotary_factor=0.5,
+         qk_layernorm=False,
+         bos_token_id=1,
+         eos_token_id=2,
+         **kwargs,
+     ):
+         self.vocab_size = vocab_size
+         self.hidden_size = hidden_size
+         self.intermediate_size = intermediate_size
+         self.num_hidden_layers = num_hidden_layers
+         self.num_attention_heads = num_attention_heads
+
+         if num_key_value_heads is None:
+             num_key_value_heads = num_attention_heads
+
+         self.num_key_value_heads = num_key_value_heads
+         self.resid_pdrop = resid_pdrop
+         self.embd_pdrop = embd_pdrop
+         self.attention_dropout = attention_dropout
+         self.hidden_act = hidden_act
+         self.max_position_embeddings = max_position_embeddings
+         self.initializer_range = initializer_range
+         self.layer_norm_eps = layer_norm_eps
+         self.use_cache = use_cache
+         self.rope_theta = rope_theta
+         self.rope_scaling = rope_scaling
+         self.partial_rotary_factor = partial_rotary_factor
+         self.qk_layernorm = qk_layernorm
+         self._rope_scaling_validation()
+
+         super().__init__(
+             bos_token_id=bos_token_id,
+             eos_token_id=eos_token_id,
+             tie_word_embeddings=tie_word_embeddings,
+             **kwargs,
+         )
+
+     # Copied from transformers.models.llama.configuration_llama.LlamaConfig._rope_scaling_validation
+     def _rope_scaling_validation(self):
+         """
+         Validate the `rope_scaling` configuration.
+         """
+         if self.rope_scaling is None:
+             return
+
+         if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
+             raise ValueError(
+                 "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, "
+                 f"got {self.rope_scaling}"
+             )
+         rope_scaling_type = self.rope_scaling.get("type", None)
+         rope_scaling_factor = self.rope_scaling.get("factor", None)
+         if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
+             raise ValueError(
+                 f"`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
+             )
+         if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
+             raise ValueError(f"`rope_scaling`'s factor field must be a float > 1, got {rope_scaling_factor}")
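Since `_rope_scaling_validation` runs inside `__init__`, a malformed `rope_scaling` dict is rejected at construction time. A quick sketch of that behaviour (assuming this `configuration_phi.py` is on the import path, e.g. by running from the checkpoint directory):

```python
from configuration_phi import PhiConfig

# Valid: a known strategy with a float factor > 1 passes validation.
cfg = PhiConfig(rope_scaling={"type": "linear", "factor": 2.0})
print(cfg.rope_scaling)

# Invalid: an unknown strategy name raises at construction time.
try:
    PhiConfig(rope_scaling={"type": "cubic", "factor": 2.0})
except ValueError as err:
    print(err)
```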
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e4c72134d75107414768e906b2e17fabe145b800abee9fb74d0b33741240ad3
+ size 1198618987
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,323 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": "                               ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": "                              ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": "                             ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": "                            ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": "                           ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": "                          ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": "                         ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": "                        ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": "                       ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": "                      ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": "                     ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": "                    ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": "                   ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": "                  ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": "                 ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": "                ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": "               ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": "              ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": "             ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": "            ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50277": {
+       "content": "           ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50278": {
+       "content": "          ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50279": {
+       "content": "         ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50280": {
+       "content": "        ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50281": {
+       "content": "       ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50282": {
+       "content": "      ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50283": {
+       "content": "     ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50284": {
+       "content": "    ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50285": {
+       "content": "   ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50286": {
+       "content": "  ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50287": {
+       "content": "\t\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50288": {
+       "content": "\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50289": {
+       "content": "\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50290": {
+       "content": "\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50291": {
+       "content": "\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50292": {
+       "content": "\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50293": {
+       "content": "\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50294": {
+       "content": "\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "tokenizer_class": "CodeGenTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   "  ": 50286,
+   "   ": 50285,
+   "    ": 50284,
+   "     ": 50283,
+   "      ": 50282,
+   "       ": 50281,
+   "        ": 50280,
+   "         ": 50279,
+   "          ": 50278,
+   "           ": 50277,
+   "            ": 50276,
+   "             ": 50275,
+   "              ": 50274,
+   "               ": 50273,
+   "                ": 50272,
+   "                 ": 50271,
+   "                  ": 50270,
+   "                   ": 50269,
+   "                    ": 50268,
+   "                     ": 50267,
+   "                      ": 50266,
+   "                       ": 50265,
+   "                        ": 50264,
+   "                         ": 50263,
+   "                          ": 50262,
+   "                           ": 50261,
+   "                            ": 50260,
+   "                             ": 50259,
+   "                              ": 50258,
+   "                               ": 50257
+ }
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,193 @@
+ {
+   "_name_or_path": "checkpoints/microsoft/phi-1_5",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.dense",
+         "mlp.fc1",
+         "mlp.fc2"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.fc1": 1279820,
+       "model.layers.0.mlp.fc2": 1236307,
+       "model.layers.0.self_attn.dense": 189187,
+       "model.layers.0.self_attn.k_proj": 318137,
+       "model.layers.0.self_attn.q_proj": 285336,
+       "model.layers.0.self_attn.v_proj": 212019,
+       "model.layers.1.mlp.fc1": 539655,
+       "model.layers.1.mlp.fc2": 766840,
+       "model.layers.1.self_attn.dense": 176195,
+       "model.layers.1.self_attn.k_proj": 224990,
+       "model.layers.1.self_attn.q_proj": 219788,
+       "model.layers.1.self_attn.v_proj": 201422,
+       "model.layers.10.mlp.fc1": 710241,
+       "model.layers.10.mlp.fc2": 754886,
+       "model.layers.10.self_attn.dense": 180978,
+       "model.layers.10.self_attn.k_proj": 221678,
+       "model.layers.10.self_attn.q_proj": 212231,
+       "model.layers.10.self_attn.v_proj": 200830,
+       "model.layers.11.mlp.fc1": 710220,
+       "model.layers.11.mlp.fc2": 741443,
+       "model.layers.11.self_attn.dense": 178022,
+       "model.layers.11.self_attn.k_proj": 215089,
+       "model.layers.11.self_attn.q_proj": 207761,
+       "model.layers.11.self_attn.v_proj": 194583,
+       "model.layers.12.mlp.fc1": 697047,
+       "model.layers.12.mlp.fc2": 752645,
+       "model.layers.12.self_attn.dense": 176274,
+       "model.layers.12.self_attn.k_proj": 220310,
+       "model.layers.12.self_attn.q_proj": 213155,
+       "model.layers.12.self_attn.v_proj": 198321,
+       "model.layers.13.mlp.fc1": 687659,
+       "model.layers.13.mlp.fc2": 738003,
+       "model.layers.13.self_attn.dense": 178018,
+       "model.layers.13.self_attn.k_proj": 221049,
+       "model.layers.13.self_attn.q_proj": 210113,
+       "model.layers.13.self_attn.v_proj": 195362,
+       "model.layers.14.mlp.fc1": 682287,
+       "model.layers.14.mlp.fc2": 781981,
+       "model.layers.14.self_attn.dense": 184895,
+       "model.layers.14.self_attn.k_proj": 217243,
+       "model.layers.14.self_attn.q_proj": 213786,
+       "model.layers.14.self_attn.v_proj": 198728,
+       "model.layers.15.mlp.fc1": 666136,
+       "model.layers.15.mlp.fc2": 761731,
+       "model.layers.15.self_attn.dense": 177136,
+       "model.layers.15.self_attn.k_proj": 217895,
+       "model.layers.15.self_attn.q_proj": 232288,
+       "model.layers.15.self_attn.v_proj": 191081,
+       "model.layers.16.mlp.fc1": 656390,
+       "model.layers.16.mlp.fc2": 799995,
+       "model.layers.16.self_attn.dense": 174656,
+       "model.layers.16.self_attn.k_proj": 217135,
+       "model.layers.16.self_attn.q_proj": 219926,
+       "model.layers.16.self_attn.v_proj": 185798,
+       "model.layers.17.mlp.fc1": 639288,
+       "model.layers.17.mlp.fc2": 775904,
+       "model.layers.17.self_attn.dense": 173271,
+       "model.layers.17.self_attn.k_proj": 206996,
+       "model.layers.17.self_attn.q_proj": 205270,
+       "model.layers.17.self_attn.v_proj": 184931,
+       "model.layers.18.mlp.fc1": 632334,
+       "model.layers.18.mlp.fc2": 768287,
+       "model.layers.18.self_attn.dense": 189412,
+       "model.layers.18.self_attn.k_proj": 215687,
+       "model.layers.18.self_attn.q_proj": 242190,
+       "model.layers.18.self_attn.v_proj": 200154,
+       "model.layers.19.mlp.fc1": 625021,
+       "model.layers.19.mlp.fc2": 738002,
+       "model.layers.19.self_attn.dense": 186977,
+       "model.layers.19.self_attn.k_proj": 216466,
+       "model.layers.19.self_attn.q_proj": 240694,
+       "model.layers.19.self_attn.v_proj": 197648,
+       "model.layers.2.mlp.fc1": 621667,
+       "model.layers.2.mlp.fc2": 757420,
+       "model.layers.2.self_attn.dense": 170986,
+       "model.layers.2.self_attn.k_proj": 225618,
+       "model.layers.2.self_attn.q_proj": 217741,
+       "model.layers.2.self_attn.v_proj": 200228,
+       "model.layers.20.mlp.fc1": 614692,
+       "model.layers.20.mlp.fc2": 727978,
+       "model.layers.20.self_attn.dense": 175731,
+       "model.layers.20.self_attn.k_proj": 213423,
+       "model.layers.20.self_attn.q_proj": 236043,
+       "model.layers.20.self_attn.v_proj": 183771,
+       "model.layers.21.mlp.fc1": 618662,
+       "model.layers.21.mlp.fc2": 738785,
+       "model.layers.21.self_attn.dense": 177493,
+       "model.layers.21.self_attn.k_proj": 208350,
+       "model.layers.21.self_attn.q_proj": 237646,
+       "model.layers.21.self_attn.v_proj": 187251,
+       "model.layers.22.mlp.fc1": 629352,
+       "model.layers.22.mlp.fc2": 818793,
+       "model.layers.22.self_attn.dense": 175140,
+       "model.layers.22.self_attn.k_proj": 202527,
+       "model.layers.22.self_attn.q_proj": 284459,
+       "model.layers.22.self_attn.v_proj": 180999,
+       "model.layers.23.mlp.fc1": 711633,
+       "model.layers.23.mlp.fc2": 1103566,
+       "model.layers.23.self_attn.dense": 219201,
+       "model.layers.23.self_attn.k_proj": 224644,
+       "model.layers.23.self_attn.q_proj": 397194,
+       "model.layers.23.self_attn.v_proj": 230928,
+       "model.layers.3.mlp.fc1": 663185,
+       "model.layers.3.mlp.fc2": 761065,
+       "model.layers.3.self_attn.dense": 185269,
+       "model.layers.3.self_attn.k_proj": 240041,
+       "model.layers.3.self_attn.q_proj": 232277,
+       "model.layers.3.self_attn.v_proj": 214858,
+       "model.layers.4.mlp.fc1": 716587,
+       "model.layers.4.mlp.fc2": 767640,
+       "model.layers.4.self_attn.dense": 179773,
+       "model.layers.4.self_attn.k_proj": 227913,
+       "model.layers.4.self_attn.q_proj": 220527,
+       "model.layers.4.self_attn.v_proj": 211685,
+       "model.layers.5.mlp.fc1": 707590,
+       "model.layers.5.mlp.fc2": 780274,
+       "model.layers.5.self_attn.dense": 178504,
+       "model.layers.5.self_attn.k_proj": 247977,
+       "model.layers.5.self_attn.q_proj": 243896,
+       "model.layers.5.self_attn.v_proj": 207831,
+       "model.layers.6.mlp.fc1": 710038,
+       "model.layers.6.mlp.fc2": 763787,
+       "model.layers.6.self_attn.dense": 190308,
+       "model.layers.6.self_attn.k_proj": 224045,
+       "model.layers.6.self_attn.q_proj": 220275,
+       "model.layers.6.self_attn.v_proj": 212698,
+       "model.layers.7.mlp.fc1": 715221,
+       "model.layers.7.mlp.fc2": 758669,
+       "model.layers.7.self_attn.dense": 175635,
+       "model.layers.7.self_attn.k_proj": 231017,
+       "model.layers.7.self_attn.q_proj": 224708,
+       "model.layers.7.self_attn.v_proj": 200219,
+       "model.layers.8.mlp.fc1": 722869,
+       "model.layers.8.mlp.fc2": 747381,
+       "model.layers.8.self_attn.dense": 184555,
+       "model.layers.8.self_attn.k_proj": 230928,
+       "model.layers.8.self_attn.q_proj": 224025,
+       "model.layers.8.self_attn.v_proj": 206979,
+       "model.layers.9.mlp.fc1": 719199,
+       "model.layers.9.mlp.fc2": 748623,
+       "model.layers.9.self_attn.dense": 174700,
+       "model.layers.9.self_attn.k_proj": 228878,
+       "model.layers.9.self_attn.q_proj": 222182,
+       "model.layers.9.self_attn.v_proj": 199200
+     }
+   },
+   "architectures": [
+     "PhiForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": null,
+   "embd_pdrop": 0.0,
+   "eos_token_id": null,
+   "hidden_act": "gelu_new",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "phi",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 24,
+   "num_key_value_heads": 32,
+   "partial_rotary_factor": 0.5,
+   "qk_layernorm": false,
+   "resid_pdrop": 0.0,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "vocab_size": 51200
+ }
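The two phi-1_5 checkpoints in this commit share the same architecture and differ only in how many values were kept sparse; the 3.74-bit variant keeps substantially more than the 3.32-bit one. A sketch comparing the totals with the standard library (directory names illustrative, assuming local downloads):

```python
import json

def total_sparse(ckpt_dir: str) -> int:
    """Sum the per-module sparse value counts recorded in a checkpoint's config.json."""
    with open(f"{ckpt_dir}/config.json") as f:
        return sum(json.load(f)["anyprec"]["sparse_numvals"].values())

small = total_sparse("anyprec-phi-1_5-dns-3.32-2.0-w4_orig2-gc1-c4_s100_blk512")
large = total_sparse("anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512")
print(f"{small:,} vs {large:,} sparse values ({large / small:.2f}x)")
```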
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/configuration_phi.py ADDED
@@ -0,0 +1,193 @@
+ # coding=utf-8
+ # Copyright 2023 Microsoft and the HuggingFace Inc. team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """ Phi model configuration"""
+
+
+ from transformers.configuration_utils import PretrainedConfig
+ from transformers.utils import logging
+
+
+ logger = logging.get_logger(__name__)
+
+ PHI_PRETRAINED_CONFIG_ARCHIVE_MAP = {
+     "microsoft/phi-1_5": "https://huggingface.co/microsoft/phi-1_5/resolve/main/config.json",
+ }
+
+
+ class PhiConfig(PretrainedConfig):
+     r"""
+     This is the configuration class to store the configuration of a [`PhiModel`]. It is used to instantiate a Phi
+     model according to the specified arguments, defining the model architecture. Instantiating a configuration with
+     the defaults will yield a configuration similar to that of the Phi
+     [microsoft/phi-1](https://huggingface.co/microsoft/phi-1).
+
+     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+     documentation from [`PretrainedConfig`] for more information.
+
+     Args:
+         vocab_size (`int`, *optional*, defaults to 51200):
+             Vocabulary size of the Phi model. Defines the number of different tokens that can be represented by the
+             `inputs_ids` passed when calling [`PhiModel`].
+         hidden_size (`int`, *optional*, defaults to 2048):
+             Dimension of the hidden representations.
+         intermediate_size (`int`, *optional*, defaults to 8192):
+             Dimension of the MLP representations.
+         num_hidden_layers (`int`, *optional*, defaults to 24):
+             Number of hidden layers in the Transformer decoder.
+         num_attention_heads (`int`, *optional*, defaults to 32):
+             Number of attention heads for each attention layer in the Transformer decoder.
+         num_key_value_heads (`int`, *optional*):
+             This is the number of key_value heads that should be used to implement Grouped Query Attention. If
+             `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA); if
+             `num_key_value_heads=1`, the model will use Multi Query Attention (MQA); otherwise GQA is used. When
+             converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
+             by meanpooling all the original heads within that group. For more details, check out [this
+             paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
+             `num_attention_heads`.
+         resid_pdrop (`float`, *optional*, defaults to 0.0):
+             Dropout probability for mlp outputs.
+         embd_pdrop (`int`, *optional*, defaults to 0.0):
+             The dropout ratio for the embeddings.
+         attention_dropout (`float`, *optional*, defaults to 0.0):
+             The dropout ratio after computing the attention scores.
+         hidden_act (`str` or `function`, *optional*, defaults to `"gelu_new"`):
+             The non-linear activation function (function or string) in the decoder.
+         max_position_embeddings (`int`, *optional*, defaults to 2048):
+             The maximum sequence length that this model might ever be used with. Phi-1 and Phi-1.5 support up to 2048
+             tokens.
+         initializer_range (`float`, *optional*, defaults to 0.02):
+             The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
+         layer_norm_eps (`float`, *optional*, defaults to 1e-05):
+             The epsilon used by the layer normalization layers.
+         use_cache (`bool`, *optional*, defaults to `True`):
+             Whether or not the model should return the last key/values attentions (not used by all models). Only
+             relevant if `config.is_decoder=True`.
+         tie_word_embeddings (`bool`, *optional*, defaults to `False`):
+             Whether to tie weight embeddings.
+         rope_theta (`float`, *optional*, defaults to 10000.0):
+             The base period of the RoPE embeddings.
+         rope_scaling (`Dict`, *optional*):
+             Dictionary containing the scaling configuration for the RoPE embeddings. Currently supports two scaling
+             strategies: linear and dynamic. Their scaling factor must be a float greater than 1. The expected format
+             is `{"type": strategy name, "factor": scaling factor}`. When using this flag, don't update
+             `max_position_embeddings` to the expected new maximum. See the following thread for more information on how
+             these scaling strategies behave:
+             https://www.reddit.com/r/LocalPersimmon/comments/14mrgpr/dynamically_scaled_rope_further_increases/. This
+             is an experimental feature, subject to breaking API changes in future versions.
+         partial_rotary_factor (`float`, *optional*, defaults to 0.5):
+             Percentage of the query and keys which will have rotary embedding.
+         qk_layernorm (`bool`, *optional*, defaults to `False`):
+             Whether or not to normalize the Queries and Keys after projecting the hidden states.
+         bos_token_id (`int`, *optional*, defaults to 1):
+             Denotes the beginning-of-sequence token id.
+         eos_token_id (`int`, *optional*, defaults to 2):
+             Denotes the end-of-sequence token id.
+
+     Example:
+
+     ```python
+     >>> from transformers import PhiModel, PhiConfig
+
+     >>> # Initializing a Phi-1 style configuration
+     >>> configuration = PhiConfig.from_pretrained("microsoft/phi-1")
+
+     >>> # Initializing a model from the configuration
+     >>> model = PhiModel(configuration)
+
+     >>> # Accessing the model configuration
+     >>> configuration = model.config
+     ```"""
+
+     model_type = "phi"
+     keys_to_ignore_at_inference = ["past_key_values"]
+
117
+ def __init__(
118
+ self,
119
+ vocab_size=51200,
120
+ hidden_size=2048,
121
+ intermediate_size=8192,
122
+ num_hidden_layers=24,
123
+ num_attention_heads=32,
124
+ num_key_value_heads=None,
125
+ resid_pdrop=0.0,
126
+ embd_pdrop=0.0,
127
+ attention_dropout=0.0,
128
+ hidden_act="gelu_new",
129
+ max_position_embeddings=2048,
130
+ initializer_range=0.02,
131
+ layer_norm_eps=1e-5,
132
+ use_cache=True,
133
+ tie_word_embeddings=False,
134
+ rope_theta=10000.0,
135
+ rope_scaling=None,
136
+ partial_rotary_factor=0.5,
137
+ qk_layernorm=False,
138
+ bos_token_id=1,
139
+ eos_token_id=2,
140
+ **kwargs,
141
+ ):
142
+ self.vocab_size = vocab_size
143
+ self.hidden_size = hidden_size
144
+ self.intermediate_size = intermediate_size
145
+ self.num_hidden_layers = num_hidden_layers
146
+ self.num_attention_heads = num_attention_heads
147
+
148
+ if num_key_value_heads is None:
149
+ num_key_value_heads = num_attention_heads
150
+
151
+ self.num_key_value_heads = num_key_value_heads
152
+ self.resid_pdrop = resid_pdrop
153
+ self.embd_pdrop = embd_pdrop
154
+ self.attention_dropout = attention_dropout
155
+ self.hidden_act = hidden_act
156
+ self.max_position_embeddings = max_position_embeddings
157
+ self.initializer_range = initializer_range
158
+ self.layer_norm_eps = layer_norm_eps
159
+ self.use_cache = use_cache
160
+ self.rope_theta = rope_theta
161
+ self.rope_scaling = rope_scaling
162
+ self.partial_rotary_factor = partial_rotary_factor
163
+ self.qk_layernorm = qk_layernorm
164
+ self._rope_scaling_validation()
165
+
166
+ super().__init__(
167
+ bos_token_id=bos_token_id,
168
+ eos_token_id=eos_token_id,
169
+ tie_word_embeddings=tie_word_embeddings,
170
+ **kwargs,
171
+ )
172
+
173
+ # Copied from transformers.models.llama.configuration_llama.LlamaConfig._rope_scaling_validation
174
+ def _rope_scaling_validation(self):
175
+ """
176
+ Validate the `rope_scaling` configuration.
177
+ """
178
+ if self.rope_scaling is None:
179
+ return
180
+
181
+ if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
182
+ raise ValueError(
183
+ "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
184
+ f"got {self.rope_scaling}"
185
+ )
186
+ rope_scaling_type = self.rope_scaling.get("type", None)
187
+ rope_scaling_factor = self.rope_scaling.get("factor", None)
188
+ if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
189
+ raise ValueError(
190
+ f"`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
191
+ )
192
+ if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
193
+ raise ValueError(f"`rope_scaling`'s factor field must be a float > 1, got {rope_scaling_factor}")
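For orientation: a repository that ships its own configuration_phi.py like the file above is loaded through the standard transformers auto classes. With transformers 4.39.3 the phi architecture also exists natively, so trust_remote_code is only needed if the bundled file should take precedence. A minimal sketch, assuming the folder added in this commit is available locally:

from transformers import AutoConfig

# Local folder added in this commit (path assumed).
path = "anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512"

# trust_remote_code lets transformers import the bundled configuration_phi.py.
config = AutoConfig.from_pretrained(path, trust_remote_code=True)
print(config.model_type, config.num_hidden_layers, config.vocab_size)  # phi 24 51200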
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76e6778705ad6bc1796441dd9809a68c0c029f69442598f3e01630bd2ca826dc
+ size 1387533419
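The three lines above are a Git LFS pointer rather than the weights themselves; git-lfs replaces the pointer with the real pytorch_model.bin, which can be checked against the recorded oid and size. A standard-library verification sketch (local path assumed):

import hashlib
import os

path = "anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin"
expected_oid = "76e6778705ad6bc1796441dd9809a68c0c029f69442598f3e01630bd2ca826dc"
expected_size = 1387533419

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"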
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,323 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50277": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50278": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50279": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50280": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50281": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50282": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50283": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50284": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50285": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50286": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50287": {
+       "content": "\t\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50288": {
+       "content": "\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50289": {
+       "content": "\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50290": {
+       "content": "\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50291": {
+       "content": "\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50292": {
+       "content": "\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50293": {
+       "content": "\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50294": {
+       "content": "\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "tokenizer_class": "CodeGenTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
anyprec-phi-1_5-dns-3.74-4.63-w4_orig2-gc1-c4_s100_blk512/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,273 @@
+ {
+   "_name_or_path": "checkpoints/stabilityai/stablelm-zephyr-3b",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.o_proj",
+         "mlp.gate_proj",
+         "mlp.up_proj",
+         "mlp.down_proj"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.down_proj": 361261,
+       "model.layers.0.mlp.gate_proj": 409239,
+       "model.layers.0.mlp.up_proj": 382725,
+       "model.layers.0.self_attn.k_proj": 1249422,
+       "model.layers.0.self_attn.o_proj": 493934,
+       "model.layers.0.self_attn.q_proj": 1383514,
+       "model.layers.0.self_attn.v_proj": 582091,
+       "model.layers.1.mlp.down_proj": 371941,
+       "model.layers.1.mlp.gate_proj": 312127,
+       "model.layers.1.mlp.up_proj": 350541,
+       "model.layers.1.self_attn.k_proj": 739147,
+       "model.layers.1.self_attn.o_proj": 870440,
+       "model.layers.1.self_attn.q_proj": 754729,
+       "model.layers.1.self_attn.v_proj": 790019,
+       "model.layers.10.mlp.down_proj": 400494,
+       "model.layers.10.mlp.gate_proj": 437481,
+       "model.layers.10.mlp.up_proj": 389717,
+       "model.layers.10.self_attn.k_proj": 217707,
+       "model.layers.10.self_attn.o_proj": 159623,
+       "model.layers.10.self_attn.q_proj": 204767,
+       "model.layers.10.self_attn.v_proj": 168686,
+       "model.layers.11.mlp.down_proj": 398251,
+       "model.layers.11.mlp.gate_proj": 468437,
+       "model.layers.11.mlp.up_proj": 399514,
+       "model.layers.11.self_attn.k_proj": 223137,
+       "model.layers.11.self_attn.o_proj": 153385,
+       "model.layers.11.self_attn.q_proj": 207469,
+       "model.layers.11.self_attn.v_proj": 157690,
+       "model.layers.12.mlp.down_proj": 392789,
+       "model.layers.12.mlp.gate_proj": 511766,
+       "model.layers.12.mlp.up_proj": 388221,
+       "model.layers.12.self_attn.k_proj": 195398,
+       "model.layers.12.self_attn.o_proj": 142264,
+       "model.layers.12.self_attn.q_proj": 185287,
+       "model.layers.12.self_attn.v_proj": 152926,
+       "model.layers.13.mlp.down_proj": 395950,
+       "model.layers.13.mlp.gate_proj": 495319,
+       "model.layers.13.mlp.up_proj": 404143,
+       "model.layers.13.self_attn.k_proj": 218597,
+       "model.layers.13.self_attn.o_proj": 159306,
+       "model.layers.13.self_attn.q_proj": 206129,
+       "model.layers.13.self_attn.v_proj": 168994,
+       "model.layers.14.mlp.down_proj": 395967,
+       "model.layers.14.mlp.gate_proj": 562906,
+       "model.layers.14.mlp.up_proj": 390940,
+       "model.layers.14.self_attn.k_proj": 194047,
+       "model.layers.14.self_attn.o_proj": 145953,
+       "model.layers.14.self_attn.q_proj": 182838,
+       "model.layers.14.self_attn.v_proj": 153758,
+       "model.layers.15.mlp.down_proj": 386374,
+       "model.layers.15.mlp.gate_proj": 522112,
+       "model.layers.15.mlp.up_proj": 395074,
+       "model.layers.15.self_attn.k_proj": 196997,
+       "model.layers.15.self_attn.o_proj": 150452,
+       "model.layers.15.self_attn.q_proj": 184345,
+       "model.layers.15.self_attn.v_proj": 156440,
+       "model.layers.16.mlp.down_proj": 384573,
+       "model.layers.16.mlp.gate_proj": 477452,
+       "model.layers.16.mlp.up_proj": 386693,
+       "model.layers.16.self_attn.k_proj": 215088,
+       "model.layers.16.self_attn.o_proj": 145176,
+       "model.layers.16.self_attn.q_proj": 193489,
+       "model.layers.16.self_attn.v_proj": 157381,
+       "model.layers.17.mlp.down_proj": 376292,
+       "model.layers.17.mlp.gate_proj": 440742,
+       "model.layers.17.mlp.up_proj": 372740,
+       "model.layers.17.self_attn.k_proj": 198626,
+       "model.layers.17.self_attn.o_proj": 138659,
+       "model.layers.17.self_attn.q_proj": 184019,
+       "model.layers.17.self_attn.v_proj": 146408,
+       "model.layers.18.mlp.down_proj": 372954,
+       "model.layers.18.mlp.gate_proj": 407526,
+       "model.layers.18.mlp.up_proj": 359072,
+       "model.layers.18.self_attn.k_proj": 195759,
+       "model.layers.18.self_attn.o_proj": 143198,
+       "model.layers.18.self_attn.q_proj": 178705,
+       "model.layers.18.self_attn.v_proj": 154099,
+       "model.layers.19.mlp.down_proj": 372170,
+       "model.layers.19.mlp.gate_proj": 404678,
+       "model.layers.19.mlp.up_proj": 362693,
+       "model.layers.19.self_attn.k_proj": 181548,
+       "model.layers.19.self_attn.o_proj": 141818,
+       "model.layers.19.self_attn.q_proj": 167249,
+       "model.layers.19.self_attn.v_proj": 144673,
+       "model.layers.2.mlp.down_proj": 343350,
+       "model.layers.2.mlp.gate_proj": 318733,
+       "model.layers.2.mlp.up_proj": 347181,
+       "model.layers.2.self_attn.k_proj": 727364,
+       "model.layers.2.self_attn.o_proj": 206116,
+       "model.layers.2.self_attn.q_proj": 632838,
+       "model.layers.2.self_attn.v_proj": 178305,
+       "model.layers.20.mlp.down_proj": 358950,
+       "model.layers.20.mlp.gate_proj": 407599,
+       "model.layers.20.mlp.up_proj": 359506,
+       "model.layers.20.self_attn.k_proj": 174602,
+       "model.layers.20.self_attn.o_proj": 139503,
+       "model.layers.20.self_attn.q_proj": 169296,
+       "model.layers.20.self_attn.v_proj": 136204,
+       "model.layers.21.mlp.down_proj": 357960,
+       "model.layers.21.mlp.gate_proj": 385992,
+       "model.layers.21.mlp.up_proj": 348288,
+       "model.layers.21.self_attn.k_proj": 185211,
+       "model.layers.21.self_attn.o_proj": 142190,
+       "model.layers.21.self_attn.q_proj": 176282,
+       "model.layers.21.self_attn.v_proj": 142371,
+       "model.layers.22.mlp.down_proj": 361293,
+       "model.layers.22.mlp.gate_proj": 377918,
+       "model.layers.22.mlp.up_proj": 353747,
+       "model.layers.22.self_attn.k_proj": 179724,
+       "model.layers.22.self_attn.o_proj": 143106,
+       "model.layers.22.self_attn.q_proj": 167648,
+       "model.layers.22.self_attn.v_proj": 145138,
+       "model.layers.23.mlp.down_proj": 358986,
+       "model.layers.23.mlp.gate_proj": 378110,
+       "model.layers.23.mlp.up_proj": 356957,
+       "model.layers.23.self_attn.k_proj": 178196,
+       "model.layers.23.self_attn.o_proj": 142376,
+       "model.layers.23.self_attn.q_proj": 168788,
+       "model.layers.23.self_attn.v_proj": 143664,
+       "model.layers.24.mlp.down_proj": 354332,
+       "model.layers.24.mlp.gate_proj": 366565,
+       "model.layers.24.mlp.up_proj": 346526,
+       "model.layers.24.self_attn.k_proj": 185428,
+       "model.layers.24.self_attn.o_proj": 140813,
+       "model.layers.24.self_attn.q_proj": 176689,
+       "model.layers.24.self_attn.v_proj": 139483,
+       "model.layers.25.mlp.down_proj": 353602,
+       "model.layers.25.mlp.gate_proj": 343542,
+       "model.layers.25.mlp.up_proj": 345700,
+       "model.layers.25.self_attn.k_proj": 178173,
+       "model.layers.25.self_attn.o_proj": 146206,
+       "model.layers.25.self_attn.q_proj": 166860,
+       "model.layers.25.self_attn.v_proj": 144246,
+       "model.layers.26.mlp.down_proj": 356005,
+       "model.layers.26.mlp.gate_proj": 339690,
+       "model.layers.26.mlp.up_proj": 347487,
+       "model.layers.26.self_attn.k_proj": 196789,
+       "model.layers.26.self_attn.o_proj": 140962,
+       "model.layers.26.self_attn.q_proj": 179600,
+       "model.layers.26.self_attn.v_proj": 145076,
+       "model.layers.27.mlp.down_proj": 354147,
+       "model.layers.27.mlp.gate_proj": 333874,
+       "model.layers.27.mlp.up_proj": 342862,
+       "model.layers.27.self_attn.k_proj": 191073,
+       "model.layers.27.self_attn.o_proj": 138949,
+       "model.layers.27.self_attn.q_proj": 174841,
+       "model.layers.27.self_attn.v_proj": 139074,
+       "model.layers.28.mlp.down_proj": 350013,
+       "model.layers.28.mlp.gate_proj": 343786,
+       "model.layers.28.mlp.up_proj": 341398,
+       "model.layers.28.self_attn.k_proj": 193596,
+       "model.layers.28.self_attn.o_proj": 141809,
+       "model.layers.28.self_attn.q_proj": 174614,
+       "model.layers.28.self_attn.v_proj": 143142,
+       "model.layers.29.mlp.down_proj": 356645,
+       "model.layers.29.mlp.gate_proj": 382614,
+       "model.layers.29.mlp.up_proj": 351816,
+       "model.layers.29.self_attn.k_proj": 192295,
+       "model.layers.29.self_attn.o_proj": 147619,
+       "model.layers.29.self_attn.q_proj": 174798,
+       "model.layers.29.self_attn.v_proj": 148455,
+       "model.layers.3.mlp.down_proj": 353085,
+       "model.layers.3.mlp.gate_proj": 310395,
+       "model.layers.3.mlp.up_proj": 352469,
+       "model.layers.3.self_attn.k_proj": 329393,
+       "model.layers.3.self_attn.o_proj": 179470,
+       "model.layers.3.self_attn.q_proj": 307652,
+       "model.layers.3.self_attn.v_proj": 163581,
+       "model.layers.30.mlp.down_proj": 380293,
+       "model.layers.30.mlp.gate_proj": 435280,
+       "model.layers.30.mlp.up_proj": 352518,
+       "model.layers.30.self_attn.k_proj": 178469,
+       "model.layers.30.self_attn.o_proj": 156350,
+       "model.layers.30.self_attn.q_proj": 165420,
+       "model.layers.30.self_attn.v_proj": 157109,
+       "model.layers.31.mlp.down_proj": 407998,
+       "model.layers.31.mlp.gate_proj": 478129,
+       "model.layers.31.mlp.up_proj": 360125,
+       "model.layers.31.self_attn.k_proj": 162466,
+       "model.layers.31.self_attn.o_proj": 180364,
+       "model.layers.31.self_attn.q_proj": 160350,
+       "model.layers.31.self_attn.v_proj": 166443,
+       "model.layers.4.mlp.down_proj": 351602,
+       "model.layers.4.mlp.gate_proj": 303771,
+       "model.layers.4.mlp.up_proj": 351399,
+       "model.layers.4.self_attn.k_proj": 269033,
+       "model.layers.4.self_attn.o_proj": 159082,
+       "model.layers.4.self_attn.q_proj": 257637,
+       "model.layers.4.self_attn.v_proj": 156983,
+       "model.layers.5.mlp.down_proj": 350684,
+       "model.layers.5.mlp.gate_proj": 325463,
+       "model.layers.5.mlp.up_proj": 347872,
+       "model.layers.5.self_attn.k_proj": 237619,
+       "model.layers.5.self_attn.o_proj": 179358,
+       "model.layers.5.self_attn.q_proj": 227821,
+       "model.layers.5.self_attn.v_proj": 191027,
+       "model.layers.6.mlp.down_proj": 364690,
+       "model.layers.6.mlp.gate_proj": 341908,
+       "model.layers.6.mlp.up_proj": 356264,
+       "model.layers.6.self_attn.k_proj": 200776,
+       "model.layers.6.self_attn.o_proj": 144559,
+       "model.layers.6.self_attn.q_proj": 189252,
+       "model.layers.6.self_attn.v_proj": 163452,
+       "model.layers.7.mlp.down_proj": 367623,
+       "model.layers.7.mlp.gate_proj": 355908,
+       "model.layers.7.mlp.up_proj": 361007,
+       "model.layers.7.self_attn.k_proj": 192816,
+       "model.layers.7.self_attn.o_proj": 141675,
+       "model.layers.7.self_attn.q_proj": 182592,
+       "model.layers.7.self_attn.v_proj": 147459,
+       "model.layers.8.mlp.down_proj": 381346,
+       "model.layers.8.mlp.gate_proj": 392814,
+       "model.layers.8.mlp.up_proj": 385174,
+       "model.layers.8.self_attn.k_proj": 214820,
+       "model.layers.8.self_attn.o_proj": 148998,
+       "model.layers.8.self_attn.q_proj": 205237,
+       "model.layers.8.self_attn.v_proj": 154614,
+       "model.layers.9.mlp.down_proj": 384446,
+       "model.layers.9.mlp.gate_proj": 408833,
+       "model.layers.9.mlp.up_proj": 386970,
+       "model.layers.9.self_attn.k_proj": 203316,
+       "model.layers.9.self_attn.o_proj": 150194,
+       "model.layers.9.self_attn.q_proj": 197655,
+       "model.layers.9.self_attn.v_proj": 157709
+     }
+   },
+   "architectures": [
+     "StableLmForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "eos_token_id": 0,
+   "hidden_act": "silu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 2560,
+   "initializer_range": 0.02,
+   "intermediate_size": 6912,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 4096,
+   "model_type": "stablelm",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "partial_rotary_factor": 0.25,
+   "rope_scaling": null,
+   "rope_theta": 10000,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "use_qkv_bias": false,
+   "vocab_size": 50304
+ }
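The anyprec block above is repository-specific metadata rather than a standard transformers field: it records the seed and parent precisions for any-precision quantization plus a per-module count of sparse values (presumably outliers kept outside the quantized grid; the commit itself does not say). A sketch for inspecting it straight from config.json, assuming the folder is local:

import json

folder = "anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512"
with open(f"{folder}/config.json") as f:
    cfg = json.load(f)

ap = cfg["anyprec"]
print(f"seed {ap['seed_precision']}-bit -> parent {ap['parent_precision']}-bit")

# Modules with the most sparse values; layer-0 attention dominates in this checkpoint.
top = sorted(ap["sparse_numvals"].items(), key=lambda kv: kv[1], reverse=True)[:5]
for name, count in top:
    print(f"{name}: {count}")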
anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72cc534f07c12f489757af6800791c21d0e9a25b032113a4ae4b24a9003c6270
+ size 2226904086
anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
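All four special-token slots above point at <|endoftext|>, which matches the bos_token_id and eos_token_id of 0 in this model's config.json. A quick check, assuming the folder is local:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512"
)
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)  # all <|endoftext|>
print(tok.eos_token_id)  # 0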
anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,213 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|padding|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50254": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50255": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50256": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPTNeoXTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
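The chat_template field above is a Jinja template consumed by tokenizer.apply_chat_template; it lays out <|system|>/<|user|>/<|assistant|> turns, each terminated by the eos token. A usage sketch with invented message contents:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "anyprec-stablelm-zephyr-3b-dns-3.41-2.5625-w4_orig2-gc1-c4_s100_blk512"
)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize the article in two sentences."},
]
# add_generation_prompt=True appends the trailing '<|assistant|>' from the template.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)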
anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,273 @@
+ {
+   "_name_or_path": "checkpoints/stabilityai/stablelm-zephyr-3b",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.o_proj",
+         "mlp.gate_proj",
+         "mlp.up_proj",
+         "mlp.down_proj"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.down_proj": 778629,
+       "model.layers.0.mlp.gate_proj": 807734,
+       "model.layers.0.mlp.up_proj": 803213,
+       "model.layers.0.self_attn.k_proj": 1461169,
+       "model.layers.0.self_attn.o_proj": 640692,
+       "model.layers.0.self_attn.q_proj": 1592355,
+       "model.layers.0.self_attn.v_proj": 759195,
+       "model.layers.1.mlp.down_proj": 780894,
+       "model.layers.1.mlp.gate_proj": 663948,
+       "model.layers.1.mlp.up_proj": 770295,
+       "model.layers.1.self_attn.k_proj": 928415,
+       "model.layers.1.self_attn.o_proj": 1132444,
+       "model.layers.1.self_attn.q_proj": 943481,
+       "model.layers.1.self_attn.v_proj": 1042300,
+       "model.layers.10.mlp.down_proj": 817033,
+       "model.layers.10.mlp.gate_proj": 830536,
+       "model.layers.10.mlp.up_proj": 823874,
+       "model.layers.10.self_attn.k_proj": 390605,
+       "model.layers.10.self_attn.o_proj": 317140,
+       "model.layers.10.self_attn.q_proj": 376199,
+       "model.layers.10.self_attn.v_proj": 334549,
+       "model.layers.11.mlp.down_proj": 833947,
+       "model.layers.11.mlp.gate_proj": 868719,
+       "model.layers.11.mlp.up_proj": 816985,
+       "model.layers.11.self_attn.k_proj": 397474,
+       "model.layers.11.self_attn.o_proj": 311273,
+       "model.layers.11.self_attn.q_proj": 377782,
+       "model.layers.11.self_attn.v_proj": 312409,
+       "model.layers.12.mlp.down_proj": 828829,
+       "model.layers.12.mlp.gate_proj": 917976,
+       "model.layers.12.mlp.up_proj": 823316,
+       "model.layers.12.self_attn.k_proj": 361035,
+       "model.layers.12.self_attn.o_proj": 302116,
+       "model.layers.12.self_attn.q_proj": 351978,
+       "model.layers.12.self_attn.v_proj": 304205,
+       "model.layers.13.mlp.down_proj": 827309,
+       "model.layers.13.mlp.gate_proj": 900510,
+       "model.layers.13.mlp.up_proj": 821878,
+       "model.layers.13.self_attn.k_proj": 392417,
+       "model.layers.13.self_attn.o_proj": 312342,
+       "model.layers.13.self_attn.q_proj": 381601,
+       "model.layers.13.self_attn.v_proj": 333882,
+       "model.layers.14.mlp.down_proj": 806998,
+       "model.layers.14.mlp.gate_proj": 998139,
+       "model.layers.14.mlp.up_proj": 824569,
+       "model.layers.14.self_attn.k_proj": 359040,
+       "model.layers.14.self_attn.o_proj": 303410,
+       "model.layers.14.self_attn.q_proj": 348440,
+       "model.layers.14.self_attn.v_proj": 319003,
+       "model.layers.15.mlp.down_proj": 801124,
+       "model.layers.15.mlp.gate_proj": 959583,
+       "model.layers.15.mlp.up_proj": 828845,
+       "model.layers.15.self_attn.k_proj": 367357,
+       "model.layers.15.self_attn.o_proj": 311633,
+       "model.layers.15.self_attn.q_proj": 347473,
+       "model.layers.15.self_attn.v_proj": 312824,
+       "model.layers.16.mlp.down_proj": 794973,
+       "model.layers.16.mlp.gate_proj": 893569,
+       "model.layers.16.mlp.up_proj": 793791,
+       "model.layers.16.self_attn.k_proj": 384201,
+       "model.layers.16.self_attn.o_proj": 297904,
+       "model.layers.16.self_attn.q_proj": 357001,
+       "model.layers.16.self_attn.v_proj": 320214,
+       "model.layers.17.mlp.down_proj": 801463,
+       "model.layers.17.mlp.gate_proj": 861447,
+       "model.layers.17.mlp.up_proj": 792250,
+       "model.layers.17.self_attn.k_proj": 368087,
+       "model.layers.17.self_attn.o_proj": 291215,
+       "model.layers.17.self_attn.q_proj": 352834,
+       "model.layers.17.self_attn.v_proj": 298551,
+       "model.layers.18.mlp.down_proj": 780022,
+       "model.layers.18.mlp.gate_proj": 822135,
+       "model.layers.18.mlp.up_proj": 770609,
+       "model.layers.18.self_attn.k_proj": 367388,
+       "model.layers.18.self_attn.o_proj": 301723,
+       "model.layers.18.self_attn.q_proj": 348144,
+       "model.layers.18.self_attn.v_proj": 314015,
+       "model.layers.19.mlp.down_proj": 781622,
+       "model.layers.19.mlp.gate_proj": 809307,
+       "model.layers.19.mlp.up_proj": 777374,
+       "model.layers.19.self_attn.k_proj": 346614,
+       "model.layers.19.self_attn.o_proj": 292359,
+       "model.layers.19.self_attn.q_proj": 330734,
+       "model.layers.19.self_attn.v_proj": 299820,
+       "model.layers.2.mlp.down_proj": 750836,
+       "model.layers.2.mlp.gate_proj": 685852,
+       "model.layers.2.mlp.up_proj": 760850,
+       "model.layers.2.self_attn.k_proj": 945792,
+       "model.layers.2.self_attn.o_proj": 383107,
+       "model.layers.2.self_attn.q_proj": 849195,
+       "model.layers.2.self_attn.v_proj": 344129,
+       "model.layers.20.mlp.down_proj": 786966,
+       "model.layers.20.mlp.gate_proj": 812499,
+       "model.layers.20.mlp.up_proj": 773175,
+       "model.layers.20.self_attn.k_proj": 341039,
+       "model.layers.20.self_attn.o_proj": 296095,
+       "model.layers.20.self_attn.q_proj": 331543,
+       "model.layers.20.self_attn.v_proj": 291150,
+       "model.layers.21.mlp.down_proj": 782960,
+       "model.layers.21.mlp.gate_proj": 792888,
+       "model.layers.21.mlp.up_proj": 753523,
+       "model.layers.21.self_attn.k_proj": 352377,
+       "model.layers.21.self_attn.o_proj": 295497,
+       "model.layers.21.self_attn.q_proj": 346059,
+       "model.layers.21.self_attn.v_proj": 297082,
+       "model.layers.22.mlp.down_proj": 766257,
+       "model.layers.22.mlp.gate_proj": 782287,
+       "model.layers.22.mlp.up_proj": 764779,
+       "model.layers.22.self_attn.k_proj": 341186,
+       "model.layers.22.self_attn.o_proj": 301225,
+       "model.layers.22.self_attn.q_proj": 329719,
+       "model.layers.22.self_attn.v_proj": 296752,
+       "model.layers.23.mlp.down_proj": 780103,
+       "model.layers.23.mlp.gate_proj": 765576,
+       "model.layers.23.mlp.up_proj": 768508,
+       "model.layers.23.self_attn.k_proj": 341081,
+       "model.layers.23.self_attn.o_proj": 298603,
+       "model.layers.23.self_attn.q_proj": 334699,
+       "model.layers.23.self_attn.v_proj": 301314,
+       "model.layers.24.mlp.down_proj": 767341,
+       "model.layers.24.mlp.gate_proj": 751797,
+       "model.layers.24.mlp.up_proj": 767790,
+       "model.layers.24.self_attn.k_proj": 353380,
+       "model.layers.24.self_attn.o_proj": 296557,
+       "model.layers.24.self_attn.q_proj": 340550,
+       "model.layers.24.self_attn.v_proj": 294305,
+       "model.layers.25.mlp.down_proj": 765684,
+       "model.layers.25.mlp.gate_proj": 740912,
+       "model.layers.25.mlp.up_proj": 742629,
+       "model.layers.25.self_attn.k_proj": 342154,
+       "model.layers.25.self_attn.o_proj": 300439,
+       "model.layers.25.self_attn.q_proj": 333588,
+       "model.layers.25.self_attn.v_proj": 303279,
+       "model.layers.26.mlp.down_proj": 767616,
+       "model.layers.26.mlp.gate_proj": 741326,
+       "model.layers.26.mlp.up_proj": 764813,
+       "model.layers.26.self_attn.k_proj": 367959,
+       "model.layers.26.self_attn.o_proj": 293380,
+       "model.layers.26.self_attn.q_proj": 342700,
+       "model.layers.26.self_attn.v_proj": 299660,
+       "model.layers.27.mlp.down_proj": 762326,
+       "model.layers.27.mlp.gate_proj": 736795,
+       "model.layers.27.mlp.up_proj": 752581,
+       "model.layers.27.self_attn.k_proj": 358916,
+       "model.layers.27.self_attn.o_proj": 291015,
+       "model.layers.27.self_attn.q_proj": 344621,
+       "model.layers.27.self_attn.v_proj": 290539,
+       "model.layers.28.mlp.down_proj": 751370,
+       "model.layers.28.mlp.gate_proj": 752285,
+       "model.layers.28.mlp.up_proj": 766889,
+       "model.layers.28.self_attn.k_proj": 367105,
+       "model.layers.28.self_attn.o_proj": 293327,
+       "model.layers.28.self_attn.q_proj": 337209,
+       "model.layers.28.self_attn.v_proj": 294827,
+       "model.layers.29.mlp.down_proj": 759893,
+       "model.layers.29.mlp.gate_proj": 787132,
+       "model.layers.29.mlp.up_proj": 758499,
+       "model.layers.29.self_attn.k_proj": 363047,
+       "model.layers.29.self_attn.o_proj": 302596,
+       "model.layers.29.self_attn.q_proj": 344432,
+       "model.layers.29.self_attn.v_proj": 301537,
+       "model.layers.3.mlp.down_proj": 765483,
+       "model.layers.3.mlp.gate_proj": 658112,
+       "model.layers.3.mlp.up_proj": 765970,
+       "model.layers.3.self_attn.k_proj": 536020,
+       "model.layers.3.self_attn.o_proj": 347045,
+       "model.layers.3.self_attn.q_proj": 504570,
+       "model.layers.3.self_attn.v_proj": 328300,
+       "model.layers.30.mlp.down_proj": 820430,
+       "model.layers.30.mlp.gate_proj": 839540,
+       "model.layers.30.mlp.up_proj": 768724,
+       "model.layers.30.self_attn.k_proj": 345312,
+       "model.layers.30.self_attn.o_proj": 313055,
+       "model.layers.30.self_attn.q_proj": 331580,
+       "model.layers.30.self_attn.v_proj": 313570,
+       "model.layers.31.mlp.down_proj": 851291,
+       "model.layers.31.mlp.gate_proj": 904549,
+       "model.layers.31.mlp.up_proj": 763949,
+       "model.layers.31.self_attn.k_proj": 326266,
+       "model.layers.31.self_attn.o_proj": 342810,
+       "model.layers.31.self_attn.q_proj": 322170,
+       "model.layers.31.self_attn.v_proj": 327819,
+       "model.layers.4.mlp.down_proj": 767625,
+       "model.layers.4.mlp.gate_proj": 658258,
+       "model.layers.4.mlp.up_proj": 755330,
+       "model.layers.4.self_attn.k_proj": 454540,
+       "model.layers.4.self_attn.o_proj": 323318,
+       "model.layers.4.self_attn.q_proj": 438780,
+       "model.layers.4.self_attn.v_proj": 318874,
+       "model.layers.5.mlp.down_proj": 748111,
+       "model.layers.5.mlp.gate_proj": 670481,
+       "model.layers.5.mlp.up_proj": 752601,
+       "model.layers.5.self_attn.k_proj": 414950,
+       "model.layers.5.self_attn.o_proj": 349395,
+       "model.layers.5.self_attn.q_proj": 399806,
+       "model.layers.5.self_attn.v_proj": 360478,
+       "model.layers.6.mlp.down_proj": 769047,
+       "model.layers.6.mlp.gate_proj": 685535,
+       "model.layers.6.mlp.up_proj": 767187,
+       "model.layers.6.self_attn.k_proj": 377153,
+       "model.layers.6.self_attn.o_proj": 302937,
+       "model.layers.6.self_attn.q_proj": 356321,
+       "model.layers.6.self_attn.v_proj": 321553,
+       "model.layers.7.mlp.down_proj": 790176,
+       "model.layers.7.mlp.gate_proj": 702881,
+       "model.layers.7.mlp.up_proj": 787154,
+       "model.layers.7.self_attn.k_proj": 361359,
+       "model.layers.7.self_attn.o_proj": 299360,
+       "model.layers.7.self_attn.q_proj": 344262,
+       "model.layers.7.self_attn.v_proj": 303575,
+       "model.layers.8.mlp.down_proj": 805556,
+       "model.layers.8.mlp.gate_proj": 762210,
+       "model.layers.8.mlp.up_proj": 797354,
+       "model.layers.8.self_attn.k_proj": 389519,
+       "model.layers.8.self_attn.o_proj": 299609,
+       "model.layers.8.self_attn.q_proj": 374913,
+       "model.layers.8.self_attn.v_proj": 312783,
+       "model.layers.9.mlp.down_proj": 809737,
+       "model.layers.9.mlp.gate_proj": 799184,
+       "model.layers.9.mlp.up_proj": 798637,
+       "model.layers.9.self_attn.k_proj": 378165,
+       "model.layers.9.self_attn.o_proj": 309393,
+       "model.layers.9.self_attn.q_proj": 368689,
+       "model.layers.9.self_attn.v_proj": 317669
+     }
+   },
+   "architectures": [
+     "StableLmForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "eos_token_id": 0,
+   "hidden_act": "silu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 2560,
+   "initializer_range": 0.02,
+   "intermediate_size": 6912,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 4096,
+   "model_type": "stablelm",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "partial_rotary_factor": 0.25,
+   "rope_scaling": null,
+   "rope_theta": 10000,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "use_qkv_bias": false,
+   "vocab_size": 50304
+ }
anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:984c8847e71e3f2f57489aea9ed9dd8e19e147348d8266546264e74c8cfc05d1
+ size 2590630614
anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-stablelm-zephyr-3b-dns-3.79-4.9365-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,213 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|padding|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50254": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50255": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50256": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPTNeoXTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,273 @@
+ {
+   "_name_or_path": "checkpoints/lmsys/vicuna-7b-v1.5",
+   "anyprec": {
+     "arch_config": {
+       "layers_name": "layers",
+       "model_name": "model",
+       "module_names": [
+         "self_attn.q_proj",
+         "self_attn.k_proj",
+         "self_attn.v_proj",
+         "self_attn.o_proj",
+         "mlp.gate_proj",
+         "mlp.up_proj",
+         "mlp.down_proj"
+       ]
+     },
+     "group_count": 1,
+     "parent_precision": 4,
+     "seed_precision": 2,
+     "sparse_numvals": {
+       "model.layers.0.mlp.down_proj": 865717,
+       "model.layers.0.mlp.gate_proj": 891731,
+       "model.layers.0.mlp.up_proj": 852167,
+       "model.layers.0.self_attn.k_proj": 1666837,
+       "model.layers.0.self_attn.o_proj": 1167262,
+       "model.layers.0.self_attn.q_proj": 1863615,
+       "model.layers.0.self_attn.v_proj": 969624,
+       "model.layers.1.mlp.down_proj": 807017,
+       "model.layers.1.mlp.gate_proj": 839914,
+       "model.layers.1.mlp.up_proj": 822530,
+       "model.layers.1.self_attn.k_proj": 1576204,
+       "model.layers.1.self_attn.o_proj": 1519063,
+       "model.layers.1.self_attn.q_proj": 1832663,
+       "model.layers.1.self_attn.v_proj": 1032409,
+       "model.layers.10.mlp.down_proj": 846612,
+       "model.layers.10.mlp.gate_proj": 1101086,
+       "model.layers.10.mlp.up_proj": 847383,
+       "model.layers.10.self_attn.k_proj": 832812,
+       "model.layers.10.self_attn.o_proj": 331226,
+       "model.layers.10.self_attn.q_proj": 722725,
+       "model.layers.10.self_attn.v_proj": 355123,
+       "model.layers.11.mlp.down_proj": 835304,
+       "model.layers.11.mlp.gate_proj": 1058635,
+       "model.layers.11.mlp.up_proj": 839004,
+       "model.layers.11.self_attn.k_proj": 894813,
+       "model.layers.11.self_attn.o_proj": 362320,
+       "model.layers.11.self_attn.q_proj": 841764,
+       "model.layers.11.self_attn.v_proj": 389021,
+       "model.layers.12.mlp.down_proj": 834675,
+       "model.layers.12.mlp.gate_proj": 1032051,
+       "model.layers.12.mlp.up_proj": 835628,
+       "model.layers.12.self_attn.k_proj": 785556,
+       "model.layers.12.self_attn.o_proj": 350763,
+       "model.layers.12.self_attn.q_proj": 690419,
+       "model.layers.12.self_attn.v_proj": 379603,
+       "model.layers.13.mlp.down_proj": 838436,
+       "model.layers.13.mlp.gate_proj": 1038017,
+       "model.layers.13.mlp.up_proj": 843742,
+       "model.layers.13.self_attn.k_proj": 731054,
+       "model.layers.13.self_attn.o_proj": 346685,
+       "model.layers.13.self_attn.q_proj": 671416,
+       "model.layers.13.self_attn.v_proj": 372488,
+       "model.layers.14.mlp.down_proj": 829572,
+       "model.layers.14.mlp.gate_proj": 1003234,
+       "model.layers.14.mlp.up_proj": 836142,
+       "model.layers.14.self_attn.k_proj": 751100,
+       "model.layers.14.self_attn.o_proj": 332196,
+       "model.layers.14.self_attn.q_proj": 685869,
+       "model.layers.14.self_attn.v_proj": 357686,
+       "model.layers.15.mlp.down_proj": 838548,
+       "model.layers.15.mlp.gate_proj": 1029194,
+       "model.layers.15.mlp.up_proj": 845673,
+       "model.layers.15.self_attn.k_proj": 705811,
+       "model.layers.15.self_attn.o_proj": 334447,
+       "model.layers.15.self_attn.q_proj": 625606,
+       "model.layers.15.self_attn.v_proj": 352729,
+       "model.layers.16.mlp.down_proj": 826342,
+       "model.layers.16.mlp.gate_proj": 994497,
+       "model.layers.16.mlp.up_proj": 827543,
+       "model.layers.16.self_attn.k_proj": 753410,
+       "model.layers.16.self_attn.o_proj": 323099,
+       "model.layers.16.self_attn.q_proj": 665073,
+       "model.layers.16.self_attn.v_proj": 347081,
+       "model.layers.17.mlp.down_proj": 811167,
+       "model.layers.17.mlp.gate_proj": 972726,
+       "model.layers.17.mlp.up_proj": 812071,
+       "model.layers.17.self_attn.k_proj": 659277,
+       "model.layers.17.self_attn.o_proj": 317972,
+       "model.layers.17.self_attn.q_proj": 600119,
+       "model.layers.17.self_attn.v_proj": 333850,
+       "model.layers.18.mlp.down_proj": 806090,
+       "model.layers.18.mlp.gate_proj": 960664,
+       "model.layers.18.mlp.up_proj": 808056,
+       "model.layers.18.self_attn.k_proj": 625283,
+       "model.layers.18.self_attn.o_proj": 319237,
+       "model.layers.18.self_attn.q_proj": 566406,
+       "model.layers.18.self_attn.v_proj": 340075,
+       "model.layers.19.mlp.down_proj": 800026,
+       "model.layers.19.mlp.gate_proj": 933181,
+       "model.layers.19.mlp.up_proj": 800906,
+       "model.layers.19.self_attn.k_proj": 672961,
+       "model.layers.19.self_attn.o_proj": 323140,
+       "model.layers.19.self_attn.q_proj": 607565,
+       "model.layers.19.self_attn.v_proj": 344369,
+       "model.layers.2.mlp.down_proj": 790912,
+       "model.layers.2.mlp.gate_proj": 799857,
+       "model.layers.2.mlp.up_proj": 801079,
+       "model.layers.2.self_attn.k_proj": 1427644,
+       "model.layers.2.self_attn.o_proj": 346883,
+       "model.layers.2.self_attn.q_proj": 1150405,
+       "model.layers.2.self_attn.v_proj": 363279,
+       "model.layers.20.mlp.down_proj": 793927,
+       "model.layers.20.mlp.gate_proj": 894220,
+       "model.layers.20.mlp.up_proj": 794121,
+       "model.layers.20.self_attn.k_proj": 714458,
+       "model.layers.20.self_attn.o_proj": 307963,
+       "model.layers.20.self_attn.q_proj": 647779,
+       "model.layers.20.self_attn.v_proj": 319545,
+       "model.layers.21.mlp.down_proj": 791701,
+       "model.layers.21.mlp.gate_proj": 878917,
+       "model.layers.21.mlp.up_proj": 786352,
+       "model.layers.21.self_attn.k_proj": 724078,
+       "model.layers.21.self_attn.o_proj": 307127,
+       "model.layers.21.self_attn.q_proj": 658484,
+       "model.layers.21.self_attn.v_proj": 324002,
+       "model.layers.22.mlp.down_proj": 789108,
+       "model.layers.22.mlp.gate_proj": 864456,
+       "model.layers.22.mlp.up_proj": 785534,
+       "model.layers.22.self_attn.k_proj": 668520,
+       "model.layers.22.self_attn.o_proj": 322998,
131
+ "model.layers.22.self_attn.q_proj": 610997,
132
+ "model.layers.22.self_attn.v_proj": 331764,
133
+ "model.layers.23.mlp.down_proj": 785969,
134
+ "model.layers.23.mlp.gate_proj": 844693,
135
+ "model.layers.23.mlp.up_proj": 784126,
136
+ "model.layers.23.self_attn.k_proj": 596794,
137
+ "model.layers.23.self_attn.o_proj": 321698,
138
+ "model.layers.23.self_attn.q_proj": 557578,
139
+ "model.layers.23.self_attn.v_proj": 332073,
140
+ "model.layers.24.mlp.down_proj": 783216,
141
+ "model.layers.24.mlp.gate_proj": 850741,
142
+ "model.layers.24.mlp.up_proj": 781510,
143
+ "model.layers.24.self_attn.k_proj": 772877,
144
+ "model.layers.24.self_attn.o_proj": 314960,
145
+ "model.layers.24.self_attn.q_proj": 713560,
146
+ "model.layers.24.self_attn.v_proj": 322067,
147
+ "model.layers.25.mlp.down_proj": 789855,
148
+ "model.layers.25.mlp.gate_proj": 866039,
149
+ "model.layers.25.mlp.up_proj": 781967,
150
+ "model.layers.25.self_attn.k_proj": 614644,
151
+ "model.layers.25.self_attn.o_proj": 311631,
152
+ "model.layers.25.self_attn.q_proj": 590696,
153
+ "model.layers.25.self_attn.v_proj": 320576,
154
+ "model.layers.26.mlp.down_proj": 794329,
155
+ "model.layers.26.mlp.gate_proj": 902478,
156
+ "model.layers.26.mlp.up_proj": 784671,
157
+ "model.layers.26.self_attn.k_proj": 669669,
158
+ "model.layers.26.self_attn.o_proj": 363895,
159
+ "model.layers.26.self_attn.q_proj": 625906,
160
+ "model.layers.26.self_attn.v_proj": 352631,
161
+ "model.layers.27.mlp.down_proj": 804636,
162
+ "model.layers.27.mlp.gate_proj": 940867,
163
+ "model.layers.27.mlp.up_proj": 788268,
164
+ "model.layers.27.self_attn.k_proj": 492336,
165
+ "model.layers.27.self_attn.o_proj": 324286,
166
+ "model.layers.27.self_attn.q_proj": 475451,
167
+ "model.layers.27.self_attn.v_proj": 325662,
168
+ "model.layers.28.mlp.down_proj": 817492,
169
+ "model.layers.28.mlp.gate_proj": 972600,
170
+ "model.layers.28.mlp.up_proj": 800454,
171
+ "model.layers.28.self_attn.k_proj": 553908,
172
+ "model.layers.28.self_attn.o_proj": 336761,
173
+ "model.layers.28.self_attn.q_proj": 533911,
174
+ "model.layers.28.self_attn.v_proj": 350670,
175
+ "model.layers.29.mlp.down_proj": 845082,
176
+ "model.layers.29.mlp.gate_proj": 1019804,
177
+ "model.layers.29.mlp.up_proj": 812861,
178
+ "model.layers.29.self_attn.k_proj": 637146,
179
+ "model.layers.29.self_attn.o_proj": 320821,
180
+ "model.layers.29.self_attn.q_proj": 602307,
181
+ "model.layers.29.self_attn.v_proj": 333849,
182
+ "model.layers.3.mlp.down_proj": 799046,
183
+ "model.layers.3.mlp.gate_proj": 819762,
184
+ "model.layers.3.mlp.up_proj": 807689,
185
+ "model.layers.3.self_attn.k_proj": 1037087,
186
+ "model.layers.3.self_attn.o_proj": 381601,
187
+ "model.layers.3.self_attn.q_proj": 882621,
188
+ "model.layers.3.self_attn.v_proj": 393971,
189
+ "model.layers.30.mlp.down_proj": 915437,
190
+ "model.layers.30.mlp.gate_proj": 1066678,
191
+ "model.layers.30.mlp.up_proj": 833731,
192
+ "model.layers.30.self_attn.k_proj": 513532,
193
+ "model.layers.30.self_attn.o_proj": 321348,
194
+ "model.layers.30.self_attn.q_proj": 497984,
195
+ "model.layers.30.self_attn.v_proj": 325219,
196
+ "model.layers.31.mlp.down_proj": 1046449,
197
+ "model.layers.31.mlp.gate_proj": 1064742,
198
+ "model.layers.31.mlp.up_proj": 913077,
199
+ "model.layers.31.self_attn.k_proj": 587052,
200
+ "model.layers.31.self_attn.o_proj": 362137,
201
+ "model.layers.31.self_attn.q_proj": 515769,
202
+ "model.layers.31.self_attn.v_proj": 359219,
203
+ "model.layers.4.mlp.down_proj": 809715,
204
+ "model.layers.4.mlp.gate_proj": 877702,
205
+ "model.layers.4.mlp.up_proj": 816908,
206
+ "model.layers.4.self_attn.k_proj": 903251,
207
+ "model.layers.4.self_attn.o_proj": 319964,
208
+ "model.layers.4.self_attn.q_proj": 761555,
209
+ "model.layers.4.self_attn.v_proj": 338525,
210
+ "model.layers.5.mlp.down_proj": 818173,
211
+ "model.layers.5.mlp.gate_proj": 940627,
212
+ "model.layers.5.mlp.up_proj": 822212,
213
+ "model.layers.5.self_attn.k_proj": 814611,
214
+ "model.layers.5.self_attn.o_proj": 348464,
215
+ "model.layers.5.self_attn.q_proj": 684653,
216
+ "model.layers.5.self_attn.v_proj": 377853,
217
+ "model.layers.6.mlp.down_proj": 816090,
218
+ "model.layers.6.mlp.gate_proj": 979907,
219
+ "model.layers.6.mlp.up_proj": 825004,
220
+ "model.layers.6.self_attn.k_proj": 894466,
221
+ "model.layers.6.self_attn.o_proj": 370829,
222
+ "model.layers.6.self_attn.q_proj": 806875,
223
+ "model.layers.6.self_attn.v_proj": 408275,
224
+ "model.layers.7.mlp.down_proj": 817051,
225
+ "model.layers.7.mlp.gate_proj": 1011373,
226
+ "model.layers.7.mlp.up_proj": 825160,
227
+ "model.layers.7.self_attn.k_proj": 870797,
228
+ "model.layers.7.self_attn.o_proj": 392151,
229
+ "model.layers.7.self_attn.q_proj": 815554,
230
+ "model.layers.7.self_attn.v_proj": 426945,
231
+ "model.layers.8.mlp.down_proj": 824092,
232
+ "model.layers.8.mlp.gate_proj": 1018165,
233
+ "model.layers.8.mlp.up_proj": 827172,
234
+ "model.layers.8.self_attn.k_proj": 820539,
235
+ "model.layers.8.self_attn.o_proj": 345716,
236
+ "model.layers.8.self_attn.q_proj": 757234,
237
+ "model.layers.8.self_attn.v_proj": 378191,
238
+ "model.layers.9.mlp.down_proj": 834891,
239
+ "model.layers.9.mlp.gate_proj": 1037677,
240
+ "model.layers.9.mlp.up_proj": 835887,
241
+ "model.layers.9.self_attn.k_proj": 739574,
242
+ "model.layers.9.self_attn.o_proj": 346792,
243
+ "model.layers.9.self_attn.q_proj": 655396,
244
+ "model.layers.9.self_attn.v_proj": 385099
245
+ }
246
+ },
247
+ "architectures": [
248
+ "LlamaForCausalLM"
249
+ ],
250
+ "attention_bias": false,
251
+ "attention_dropout": 0.0,
252
+ "bos_token_id": 1,
253
+ "eos_token_id": 2,
254
+ "hidden_act": "silu",
255
+ "hidden_size": 4096,
256
+ "initializer_range": 0.02,
257
+ "intermediate_size": 11008,
258
+ "max_position_embeddings": 4096,
259
+ "model_type": "llama",
260
+ "num_attention_heads": 32,
261
+ "num_hidden_layers": 32,
262
+ "num_key_value_heads": 32,
263
+ "pad_token_id": 0,
264
+ "pretraining_tp": 1,
265
+ "rms_norm_eps": 1e-05,
266
+ "rope_scaling": null,
267
+ "rope_theta": 10000.0,
268
+ "tie_word_embeddings": false,
269
+ "torch_dtype": "float16",
270
+ "transformers_version": "4.39.3",
271
+ "use_cache": true,
272
+ "vocab_size": 32000
273
+ }
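Besides the standard Llama fields, the config above carries an "anyprec" block: "seed_precision": 2 and "parent_precision": 4 give the bit-width range the any-precision checkpoint supports, and "sparse_numvals" records one integer per quantized linear layer, presumably the number of values kept in the sparse (outlier) component of the dense-and-sparse ("dns") decomposition. A minimal inspection sketch, assuming a local copy of this file (the path and that interpretation are our assumptions, not stated in the repo):

import json
from collections import defaultdict

# Hypothetical local path to the config.json added above.
path = "anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/config.json"
with open(path) as f:
    ap = json.load(f)["anyprec"]

print(f'{ap["seed_precision"]}-bit to {ap["parent_precision"]}-bit')  # 2-bit to 4-bit

# Aggregate the per-layer counts by projection type (q_proj, down_proj, ...).
totals = defaultdict(int)
for name, count in ap["sparse_numvals"].items():
    totals[name.rsplit(".", 1)[-1]] += count
for module, count in sorted(totals.items()):
    print(module, count)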
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54cf7cbe90b742a57f9690fbe14ffb09f2478e3064fd0db3bb122dfbc07a24bc
+ size 4793903381
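As is standard for Hugging Face repos, pytorch_model.bin is committed as a Git LFS pointer file (the three version/oid/size lines above), not the ~4.8 GB weights themselves. A small illustrative parser for such pointers (the helper name is ours, not part of any library):

# Sketch: parse a Git LFS pointer file copied verbatim from the diff above.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"),
            "size_bytes": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:54cf7cbe90b742a57f9690fbe14ffb09f2478e3064fd0db3bb122dfbc07a24bc
size 4793903381"""
info = parse_lfs_pointer(pointer)
print(info["size_bytes"] / 1e9)  # ~4.79 GB checkpoint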
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": true,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 4096,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+ }
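A quick sanity check that the tokenizer settings above load as declared, again assuming the checkpoint directory from this commit is available locally (expected values are taken straight from the config):

from transformers import AutoTokenizer

# Local directory name taken from this commit; adjust the path as needed.
tok = AutoTokenizer.from_pretrained(
    "anyprec-vicuna-7b-v1.5-dns-2.39-2.4375-w4_orig2-gc1-c4_s100_blk512"
)
print(tok.bos_token, tok.eos_token, tok.pad_token)  # <s> </s> <unk>
print(tok.model_max_length)  # 4096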
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/config.json ADDED
@@ -0,0 +1,273 @@
+ {
+ "_name_or_path": "checkpoints/lmsys/vicuna-7b-v1.5",
+ "anyprec": {
+ "arch_config": {
+ "layers_name": "layers",
+ "model_name": "model",
+ "module_names": [
+ "self_attn.q_proj",
+ "self_attn.k_proj",
+ "self_attn.v_proj",
+ "self_attn.o_proj",
+ "mlp.gate_proj",
+ "mlp.up_proj",
+ "mlp.down_proj"
+ ]
+ },
+ "group_count": 1,
+ "parent_precision": 4,
+ "seed_precision": 2,
+ "sparse_numvals": {
+ "model.layers.0.mlp.down_proj": 1233978,
+ "model.layers.0.mlp.gate_proj": 1258296,
+ "model.layers.0.mlp.up_proj": 1224078,
+ "model.layers.0.self_attn.k_proj": 1883193,
+ "model.layers.0.self_attn.o_proj": 1335571,
+ "model.layers.0.self_attn.q_proj": 2066059,
+ "model.layers.0.self_attn.v_proj": 1149426,
+ "model.layers.1.mlp.down_proj": 1170303,
+ "model.layers.1.mlp.gate_proj": 1203169,
+ "model.layers.1.mlp.up_proj": 1193914,
+ "model.layers.1.self_attn.k_proj": 1774927,
+ "model.layers.1.self_attn.o_proj": 1735353,
+ "model.layers.1.self_attn.q_proj": 2045430,
+ "model.layers.1.self_attn.v_proj": 1233905,
+ "model.layers.10.mlp.down_proj": 1214601,
+ "model.layers.10.mlp.gate_proj": 1496901,
+ "model.layers.10.mlp.up_proj": 1216897,
+ "model.layers.10.self_attn.k_proj": 1031993,
+ "model.layers.10.self_attn.o_proj": 469774,
+ "model.layers.10.self_attn.q_proj": 910405,
+ "model.layers.10.self_attn.v_proj": 499623,
+ "model.layers.11.mlp.down_proj": 1202594,
+ "model.layers.11.mlp.gate_proj": 1447186,
+ "model.layers.11.mlp.up_proj": 1208047,
+ "model.layers.11.self_attn.k_proj": 1091923,
+ "model.layers.11.self_attn.o_proj": 506382,
+ "model.layers.11.self_attn.q_proj": 1031388,
+ "model.layers.11.self_attn.v_proj": 538763,
+ "model.layers.12.mlp.down_proj": 1203837,
+ "model.layers.12.mlp.gate_proj": 1414253,
+ "model.layers.12.mlp.up_proj": 1204037,
+ "model.layers.12.self_attn.k_proj": 979989,
+ "model.layers.12.self_attn.o_proj": 491939,
+ "model.layers.12.self_attn.q_proj": 872235,
+ "model.layers.12.self_attn.v_proj": 525411,
+ "model.layers.13.mlp.down_proj": 1203607,
+ "model.layers.13.mlp.gate_proj": 1423537,
+ "model.layers.13.mlp.up_proj": 1209880,
+ "model.layers.13.self_attn.k_proj": 917637,
+ "model.layers.13.self_attn.o_proj": 486449,
+ "model.layers.13.self_attn.q_proj": 854578,
+ "model.layers.13.self_attn.v_proj": 518950,
+ "model.layers.14.mlp.down_proj": 1197605,
+ "model.layers.14.mlp.gate_proj": 1381285,
+ "model.layers.14.mlp.up_proj": 1204898,
+ "model.layers.14.self_attn.k_proj": 940564,
+ "model.layers.14.self_attn.o_proj": 470137,
+ "model.layers.14.self_attn.q_proj": 867007,
+ "model.layers.14.self_attn.v_proj": 501744,
+ "model.layers.15.mlp.down_proj": 1208918,
+ "model.layers.15.mlp.gate_proj": 1408663,
+ "model.layers.15.mlp.up_proj": 1214748,
+ "model.layers.15.self_attn.k_proj": 893986,
+ "model.layers.15.self_attn.o_proj": 474383,
+ "model.layers.15.self_attn.q_proj": 805850,
+ "model.layers.15.self_attn.v_proj": 496044,
+ "model.layers.16.mlp.down_proj": 1193388,
+ "model.layers.16.mlp.gate_proj": 1369230,
+ "model.layers.16.mlp.up_proj": 1196630,
+ "model.layers.16.self_attn.k_proj": 943400,
+ "model.layers.16.self_attn.o_proj": 459789,
+ "model.layers.16.self_attn.q_proj": 844853,
+ "model.layers.16.self_attn.v_proj": 490430,
+ "model.layers.17.mlp.down_proj": 1176012,
+ "model.layers.17.mlp.gate_proj": 1343222,
+ "model.layers.17.mlp.up_proj": 1180452,
+ "model.layers.17.self_attn.k_proj": 842348,
+ "model.layers.17.self_attn.o_proj": 455401,
+ "model.layers.17.self_attn.q_proj": 776532,
+ "model.layers.17.self_attn.v_proj": 472993,
+ "model.layers.18.mlp.down_proj": 1171208,
+ "model.layers.18.mlp.gate_proj": 1331858,
+ "model.layers.18.mlp.up_proj": 1171681,
+ "model.layers.18.self_attn.k_proj": 807039,
+ "model.layers.18.self_attn.o_proj": 455862,
+ "model.layers.18.self_attn.q_proj": 741688,
+ "model.layers.18.self_attn.v_proj": 481202,
+ "model.layers.19.mlp.down_proj": 1162136,
+ "model.layers.19.mlp.gate_proj": 1303078,
+ "model.layers.19.mlp.up_proj": 1163440,
+ "model.layers.19.self_attn.k_proj": 859732,
+ "model.layers.19.self_attn.o_proj": 462864,
+ "model.layers.19.self_attn.q_proj": 785772,
+ "model.layers.19.self_attn.v_proj": 486944,
+ "model.layers.2.mlp.down_proj": 1152157,
+ "model.layers.2.mlp.gate_proj": 1159260,
+ "model.layers.2.mlp.up_proj": 1165168,
+ "model.layers.2.self_attn.k_proj": 1663751,
+ "model.layers.2.self_attn.o_proj": 485472,
+ "model.layers.2.self_attn.q_proj": 1371023,
+ "model.layers.2.self_attn.v_proj": 512415,
+ "model.layers.20.mlp.down_proj": 1160157,
+ "model.layers.20.mlp.gate_proj": 1257357,
+ "model.layers.20.mlp.up_proj": 1155843,
+ "model.layers.20.self_attn.k_proj": 906734,
+ "model.layers.20.self_attn.o_proj": 443501,
+ "model.layers.20.self_attn.q_proj": 833323,
+ "model.layers.20.self_attn.v_proj": 457808,
+ "model.layers.21.mlp.down_proj": 1152844,
+ "model.layers.21.mlp.gate_proj": 1240343,
+ "model.layers.21.mlp.up_proj": 1145607,
+ "model.layers.21.self_attn.k_proj": 919054,
+ "model.layers.21.self_attn.o_proj": 442822,
+ "model.layers.21.self_attn.q_proj": 846683,
+ "model.layers.21.self_attn.v_proj": 462912,
+ "model.layers.22.mlp.down_proj": 1149367,
+ "model.layers.22.mlp.gate_proj": 1226597,
+ "model.layers.22.mlp.up_proj": 1145408,
+ "model.layers.22.self_attn.k_proj": 862091,
+ "model.layers.22.self_attn.o_proj": 462002,
+ "model.layers.22.self_attn.q_proj": 795237,
+ "model.layers.22.self_attn.v_proj": 472279,
+ "model.layers.23.mlp.down_proj": 1148308,
+ "model.layers.23.mlp.gate_proj": 1205318,
+ "model.layers.23.mlp.up_proj": 1143303,
+ "model.layers.23.self_attn.k_proj": 780914,
+ "model.layers.23.self_attn.o_proj": 458216,
+ "model.layers.23.self_attn.q_proj": 736854,
+ "model.layers.23.self_attn.v_proj": 473637,
+ "model.layers.24.mlp.down_proj": 1142395,
+ "model.layers.24.mlp.gate_proj": 1214395,
+ "model.layers.24.mlp.up_proj": 1137655,
+ "model.layers.24.self_attn.k_proj": 977188,
+ "model.layers.24.self_attn.o_proj": 451991,
+ "model.layers.24.self_attn.q_proj": 908230,
+ "model.layers.24.self_attn.v_proj": 462751,
+ "model.layers.25.mlp.down_proj": 1147862,
+ "model.layers.25.mlp.gate_proj": 1227621,
+ "model.layers.25.mlp.up_proj": 1142373,
+ "model.layers.25.self_attn.k_proj": 796583,
+ "model.layers.25.self_attn.o_proj": 446459,
+ "model.layers.25.self_attn.q_proj": 771205,
+ "model.layers.25.self_attn.v_proj": 459858,
+ "model.layers.26.mlp.down_proj": 1157619,
+ "model.layers.26.mlp.gate_proj": 1270276,
+ "model.layers.26.mlp.up_proj": 1143819,
+ "model.layers.26.self_attn.k_proj": 861270,
+ "model.layers.26.self_attn.o_proj": 506930,
+ "model.layers.26.self_attn.q_proj": 812193,
+ "model.layers.26.self_attn.v_proj": 496655,
+ "model.layers.27.mlp.down_proj": 1167794,
+ "model.layers.27.mlp.gate_proj": 1311278,
+ "model.layers.27.mlp.up_proj": 1150479,
+ "model.layers.27.self_attn.k_proj": 660898,
+ "model.layers.27.self_attn.o_proj": 462408,
+ "model.layers.27.self_attn.q_proj": 639690,
+ "model.layers.27.self_attn.v_proj": 465375,
+ "model.layers.28.mlp.down_proj": 1185382,
+ "model.layers.28.mlp.gate_proj": 1339786,
+ "model.layers.28.mlp.up_proj": 1160799,
+ "model.layers.28.self_attn.k_proj": 729108,
+ "model.layers.28.self_attn.o_proj": 477348,
+ "model.layers.28.self_attn.q_proj": 705351,
+ "model.layers.28.self_attn.v_proj": 493689,
+ "model.layers.29.mlp.down_proj": 1213857,
+ "model.layers.29.mlp.gate_proj": 1391366,
+ "model.layers.29.mlp.up_proj": 1176903,
+ "model.layers.29.self_attn.k_proj": 819925,
+ "model.layers.29.self_attn.o_proj": 458954,
+ "model.layers.29.self_attn.q_proj": 780980,
+ "model.layers.29.self_attn.v_proj": 475585,
+ "model.layers.3.mlp.down_proj": 1160791,
+ "model.layers.3.mlp.gate_proj": 1181464,
+ "model.layers.3.mlp.up_proj": 1173791,
+ "model.layers.3.self_attn.k_proj": 1255296,
+ "model.layers.3.self_attn.o_proj": 532234,
+ "model.layers.3.self_attn.q_proj": 1087792,
+ "model.layers.3.self_attn.v_proj": 546946,
+ "model.layers.30.mlp.down_proj": 1296643,
+ "model.layers.30.mlp.gate_proj": 1443559,
+ "model.layers.30.mlp.up_proj": 1200401,
+ "model.layers.30.self_attn.k_proj": 682857,
+ "model.layers.30.self_attn.o_proj": 458368,
+ "model.layers.30.self_attn.q_proj": 662921,
+ "model.layers.30.self_attn.v_proj": 463573,
+ "model.layers.31.mlp.down_proj": 1451063,
+ "model.layers.31.mlp.gate_proj": 1443243,
+ "model.layers.31.mlp.up_proj": 1291025,
+ "model.layers.31.self_attn.k_proj": 761264,
+ "model.layers.31.self_attn.o_proj": 504372,
+ "model.layers.31.self_attn.q_proj": 682521,
+ "model.layers.31.self_attn.v_proj": 503567,
+ "model.layers.4.mlp.down_proj": 1174946,
+ "model.layers.4.mlp.gate_proj": 1247025,
+ "model.layers.4.mlp.up_proj": 1183052,
+ "model.layers.4.self_attn.k_proj": 1113167,
+ "model.layers.4.self_attn.o_proj": 457105,
+ "model.layers.4.self_attn.q_proj": 956349,
+ "model.layers.4.self_attn.v_proj": 480678,
+ "model.layers.5.mlp.down_proj": 1185177,
+ "model.layers.5.mlp.gate_proj": 1315315,
+ "model.layers.5.mlp.up_proj": 1190124,
+ "model.layers.5.self_attn.k_proj": 1013955,
+ "model.layers.5.self_attn.o_proj": 492438,
+ "model.layers.5.self_attn.q_proj": 871171,
+ "model.layers.5.self_attn.v_proj": 529252,
+ "model.layers.6.mlp.down_proj": 1185025,
+ "model.layers.6.mlp.gate_proj": 1363849,
+ "model.layers.6.mlp.up_proj": 1189310,
+ "model.layers.6.self_attn.k_proj": 1096207,
+ "model.layers.6.self_attn.o_proj": 514822,
+ "model.layers.6.self_attn.q_proj": 1000413,
+ "model.layers.6.self_attn.v_proj": 559422,
+ "model.layers.7.mlp.down_proj": 1185723,
+ "model.layers.7.mlp.gate_proj": 1396153,
+ "model.layers.7.mlp.up_proj": 1192986,
+ "model.layers.7.self_attn.k_proj": 1072532,
+ "model.layers.7.self_attn.o_proj": 541576,
+ "model.layers.7.self_attn.q_proj": 1010874,
+ "model.layers.7.self_attn.v_proj": 579662,
+ "model.layers.8.mlp.down_proj": 1195389,
+ "model.layers.8.mlp.gate_proj": 1405540,
+ "model.layers.8.mlp.up_proj": 1195047,
+ "model.layers.8.self_attn.k_proj": 1017816,
+ "model.layers.8.self_attn.o_proj": 486159,
+ "model.layers.8.self_attn.q_proj": 944341,
+ "model.layers.8.self_attn.v_proj": 524279,
+ "model.layers.9.mlp.down_proj": 1204613,
+ "model.layers.9.mlp.gate_proj": 1426994,
+ "model.layers.9.mlp.up_proj": 1202174,
+ "model.layers.9.self_attn.k_proj": 927717,
+ "model.layers.9.self_attn.o_proj": 488203,
+ "model.layers.9.self_attn.q_proj": 834266,
+ "model.layers.9.self_attn.v_proj": 531763
+ }
+ },
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "model_type": "llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "use_cache": true,
+ "vocab_size": 32000
+ }
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:003292230db40cc217852af6b98ddabf9e93e8b8c602b0b75ac9a5d812284ab3
+ size 5134593777
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
anyprec-vicuna-7b-v1.5-dns-2.53-3.3125-w4_orig2-gc1-c4_s100_blk512/tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": true,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 4096,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+ }