TheBloke committed on
Commit 641dfa5
1 parent: 792d371

AWQ model commit

added_tokens.json ADDED
@@ -0,0 +1,262 @@
+ {
+ "<dummy001>": 49740,
+ "<dummy49741>": 49741,
+ "<dummy49742>": 49742,
+ "<dummy49743>": 49743,
+ "<dummy49744>": 49744,
+ "<dummy49745>": 49745,
+ "<dummy49746>": 49746,
+ "<dummy49747>": 49747,
+ "<dummy49748>": 49748,
+ "<dummy49749>": 49749,
+ "<dummy49750>": 49750,
+ "<dummy49751>": 49751,
+ "<dummy49752>": 49752,
+ "<dummy49753>": 49753,
+ "<dummy49754>": 49754,
+ "<dummy49755>": 49755,
+ "<dummy49756>": 49756,
+ "<dummy49757>": 49757,
+ "<dummy49758>": 49758,
+ "<dummy49759>": 49759,
+ "<dummy49760>": 49760,
+ "<dummy49761>": 49761,
+ "<dummy49762>": 49762,
+ "<dummy49763>": 49763,
+ "<dummy49764>": 49764,
+ "<dummy49765>": 49765,
+ "<dummy49766>": 49766,
+ "<dummy49767>": 49767,
+ "<dummy49768>": 49768,
+ "<dummy49769>": 49769,
+ "<dummy49770>": 49770,
+ "<dummy49771>": 49771,
+ "<dummy49772>": 49772,
+ "<dummy49773>": 49773,
+ "<dummy49774>": 49774,
+ "<dummy49775>": 49775,
+ "<dummy49776>": 49776,
+ "<dummy49777>": 49777,
+ "<dummy49778>": 49778,
+ "<dummy49779>": 49779,
+ "<dummy49780>": 49780,
+ "<dummy49781>": 49781,
+ "<dummy49782>": 49782,
+ "<dummy49783>": 49783,
+ "<dummy49784>": 49784,
+ "<dummy49785>": 49785,
+ "<dummy49786>": 49786,
+ "<dummy49787>": 49787,
+ "<dummy49788>": 49788,
+ "<dummy49789>": 49789,
+ "<dummy49790>": 49790,
+ "<dummy49791>": 49791,
+ "<dummy49792>": 49792,
+ "<dummy49793>": 49793,
+ "<dummy49794>": 49794,
+ "<dummy49795>": 49795,
+ "<dummy49796>": 49796,
+ "<dummy49797>": 49797,
+ "<dummy49798>": 49798,
+ "<dummy49799>": 49799,
+ "<dummy49800>": 49800,
+ "<dummy49801>": 49801,
+ "<dummy49802>": 49802,
+ "<dummy49803>": 49803,
+ "<dummy49804>": 49804,
+ "<dummy49805>": 49805,
+ "<dummy49806>": 49806,
+ "<dummy49807>": 49807,
+ "<dummy49808>": 49808,
+ "<dummy49809>": 49809,
+ "<dummy49810>": 49810,
+ "<dummy49811>": 49811,
+ "<dummy49812>": 49812,
+ "<dummy49813>": 49813,
+ "<dummy49814>": 49814,
+ "<dummy49815>": 49815,
+ "<dummy49816>": 49816,
+ "<dummy49817>": 49817,
+ "<dummy49818>": 49818,
+ "<dummy49819>": 49819,
+ "<dummy49820>": 49820,
+ "<dummy49821>": 49821,
+ "<dummy49822>": 49822,
+ "<dummy49823>": 49823,
+ "<dummy49824>": 49824,
+ "<dummy49825>": 49825,
+ "<dummy49826>": 49826,
+ "<dummy49827>": 49827,
+ "<dummy49828>": 49828,
+ "<dummy49829>": 49829,
+ "<dummy49830>": 49830,
+ "<dummy49831>": 49831,
+ "<dummy49832>": 49832,
+ "<dummy49833>": 49833,
+ "<dummy49834>": 49834,
+ "<dummy49835>": 49835,
+ "<dummy49836>": 49836,
+ "<dummy49837>": 49837,
+ "<dummy49838>": 49838,
+ "<dummy49839>": 49839,
+ "<dummy49840>": 49840,
+ "<dummy49841>": 49841,
+ "<dummy49842>": 49842,
+ "<dummy49843>": 49843,
+ "<dummy49844>": 49844,
+ "<dummy49845>": 49845,
+ "<dummy49846>": 49846,
+ "<dummy49847>": 49847,
+ "<dummy49848>": 49848,
+ "<dummy49849>": 49849,
+ "<dummy49850>": 49850,
+ "<dummy49851>": 49851,
+ "<dummy49852>": 49852,
+ "<dummy49853>": 49853,
+ "<dummy49854>": 49854,
+ "<dummy49855>": 49855,
+ "<dummy49856>": 49856,
+ "<dummy49857>": 49857,
+ "<dummy49858>": 49858,
+ "<dummy49859>": 49859,
+ "<dummy49860>": 49860,
+ "<dummy49861>": 49861,
+ "<dummy49862>": 49862,
+ "<dummy49863>": 49863,
+ "<dummy49864>": 49864,
+ "<dummy49865>": 49865,
+ "<dummy49866>": 49866,
+ "<dummy49867>": 49867,
+ "<dummy49868>": 49868,
+ "<dummy49869>": 49869,
+ "<dummy49870>": 49870,
+ "<dummy49871>": 49871,
+ "<dummy49872>": 49872,
+ "<dummy49873>": 49873,
+ "<dummy49874>": 49874,
+ "<dummy49875>": 49875,
+ "<dummy49876>": 49876,
+ "<dummy49877>": 49877,
+ "<dummy49878>": 49878,
+ "<dummy49879>": 49879,
+ "<dummy49880>": 49880,
+ "<dummy49881>": 49881,
+ "<dummy49882>": 49882,
+ "<dummy49883>": 49883,
+ "<dummy49884>": 49884,
+ "<dummy49885>": 49885,
+ "<dummy49886>": 49886,
+ "<dummy49887>": 49887,
+ "<dummy49888>": 49888,
+ "<dummy49889>": 49889,
+ "<dummy49890>": 49890,
+ "<dummy49891>": 49891,
+ "<dummy49892>": 49892,
+ "<dummy49893>": 49893,
+ "<dummy49894>": 49894,
+ "<dummy49895>": 49895,
+ "<dummy49896>": 49896,
+ "<dummy49897>": 49897,
+ "<dummy49898>": 49898,
+ "<dummy49899>": 49899,
+ "<dummy49900>": 49900,
+ "<dummy49901>": 49901,
+ "<dummy49902>": 49902,
+ "<dummy49903>": 49903,
+ "<dummy49904>": 49904,
+ "<dummy49905>": 49905,
+ "<dummy49906>": 49906,
+ "<dummy49907>": 49907,
+ "<dummy49908>": 49908,
+ "<dummy49909>": 49909,
+ "<dummy49910>": 49910,
+ "<dummy49911>": 49911,
+ "<dummy49912>": 49912,
+ "<dummy49913>": 49913,
+ "<dummy49914>": 49914,
+ "<dummy49915>": 49915,
+ "<dummy49916>": 49916,
+ "<dummy49917>": 49917,
+ "<dummy49918>": 49918,
+ "<dummy49919>": 49919,
+ "<dummy49920>": 49920,
+ "<dummy49921>": 49921,
+ "<dummy49922>": 49922,
+ "<dummy49923>": 49923,
+ "<dummy49924>": 49924,
+ "<dummy49925>": 49925,
+ "<dummy49926>": 49926,
+ "<dummy49927>": 49927,
+ "<dummy49928>": 49928,
+ "<dummy49929>": 49929,
+ "<dummy49930>": 49930,
+ "<dummy49931>": 49931,
+ "<dummy49932>": 49932,
+ "<dummy49933>": 49933,
+ "<dummy49934>": 49934,
+ "<dummy49935>": 49935,
+ "<dummy49936>": 49936,
+ "<dummy49937>": 49937,
+ "<dummy49938>": 49938,
+ "<dummy49939>": 49939,
+ "<dummy49940>": 49940,
+ "<dummy49941>": 49941,
+ "<dummy49942>": 49942,
+ "<dummy49943>": 49943,
+ "<dummy49944>": 49944,
+ "<dummy49945>": 49945,
+ "<dummy49946>": 49946,
+ "<dummy49947>": 49947,
+ "<dummy49948>": 49948,
+ "<dummy49949>": 49949,
+ "<dummy49950>": 49950,
+ "<dummy49951>": 49951,
+ "<dummy49952>": 49952,
+ "<dummy49953>": 49953,
+ "<dummy49954>": 49954,
+ "<dummy49955>": 49955,
+ "<dummy49956>": 49956,
+ "<dummy49957>": 49957,
+ "<dummy49958>": 49958,
+ "<dummy49959>": 49959,
+ "<dummy49960>": 49960,
+ "<dummy49961>": 49961,
+ "<dummy49962>": 49962,
+ "<dummy49963>": 49963,
+ "<dummy49964>": 49964,
+ "<dummy49965>": 49965,
+ "<dummy49966>": 49966,
+ "<dummy49967>": 49967,
+ "<dummy49968>": 49968,
+ "<dummy49969>": 49969,
+ "<dummy49970>": 49970,
+ "<dummy49971>": 49971,
+ "<dummy49972>": 49972,
+ "<dummy49973>": 49973,
+ "<dummy49974>": 49974,
+ "<dummy49975>": 49975,
+ "<dummy49976>": 49976,
+ "<dummy49977>": 49977,
+ "<dummy49978>": 49978,
+ "<dummy49979>": 49979,
+ "<dummy49980>": 49980,
+ "<dummy49981>": 49981,
+ "<dummy49982>": 49982,
+ "<dummy49983>": 49983,
+ "<dummy49984>": 49984,
+ "<dummy49985>": 49985,
+ "<dummy49986>": 49986,
+ "<dummy49987>": 49987,
+ "<dummy49988>": 49988,
+ "<dummy49989>": 49989,
+ "<dummy49990>": 49990,
+ "<dummy49991>": 49991,
+ "<dummy49992>": 49992,
+ "<dummy49993>": 49993,
+ "<dummy49994>": 49994,
+ "<dummy49995>": 49995,
+ "<dummy49996>": 49996,
+ "<dummy49997>": 49997,
+ "<dummy49998>": 49998,
+ "<dummy49999>": 49999
+ }
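The 260 `<dummy...>` entries above (IDs 49740–49999) pad the base tokenizer out to the 50,000-token vocabulary declared in config.json below. A minimal sketch to confirm this, assuming the repo id is TheBloke/stockmark-13b-AWQ:

```python
# Sketch, assuming repo id TheBloke/stockmark-13b-AWQ (not stated in this diff).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("TheBloke/stockmark-13b-AWQ")

# Base vocab + 260 added dummy tokens should match config.json's vocab_size.
print(len(tokenizer))                                    # expected: 50000
print(tokenizer.convert_tokens_to_ids("<dummy49999>"))   # expected: 49999
```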
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "_name_or_path": "/workspace/process/stockmark_stockmark-13b/source",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "max_position_embeddings": 2048,
+ "model_type": "llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 40,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.34.1",
+ "use_cache": true,
+ "vocab_size": 50000
+ }
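This config describes a standard 13B Llama layout: 40 layers, 40 attention heads with full multi-head attention (num_key_value_heads equals num_attention_heads, so no GQA), hidden size 5120, and a 2048-token context window. A sketch inspecting it with transformers, under the same assumed repo id:

```python
# Sketch, assuming repo id TheBloke/stockmark-13b-AWQ.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("TheBloke/stockmark-13b-AWQ")
assert config.model_type == "llama"
print(config.hidden_size, config.num_hidden_layers,
      config.num_attention_heads)        # -> 5120 40 40
print(config.max_position_embeddings)    # -> 2048 (context length)
```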
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "bos_token_id": 0,
+ "eos_token_id": 1,
+ "pad_token_id": 2,
+ "transformers_version": "4.34.1"
+ }
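These are the defaults that model.generate() picks up. Note that this file sets pad_token_id to 2 while config.json above sets it to 0; at generation time the generation config's value takes precedence. A sketch, same assumed repo id:

```python
# Sketch, assuming repo id TheBloke/stockmark-13b-AWQ.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("TheBloke/stockmark-13b-AWQ")
print(gen_config.bos_token_id,
      gen_config.eos_token_id,
      gen_config.pad_token_id)   # -> 0 1 2
```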
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec55ba83b5daa6ac83ea35943e47f43e242a281510ebe0e63a066760eca2014c
+ size 7616627312
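This is a Git LFS pointer, not the weights themselves: the actual ~7.6 GB model.safetensors lives in LFS storage, and the sha256 oid lets you verify a download. A sketch, assuming the file has been fetched into the current directory:

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer's oid.
import hashlib

EXPECTED = "ec55ba83b5daa6ac83ea35943e47f43e242a281510ebe0e63a066760eca2014c"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:          # path is illustrative
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED, "checksum mismatch"
```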
quant_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "zero_point": true,
+ "q_group_size": 128,
+ "w_bit": 4,
+ "version": "GEMM"
+ }
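quant_config.json records the AWQ quantization settings: 4-bit weights, group size 128, zero-point quantization, and the GEMM kernel variant. A minimal loading sketch with AutoAWQ (`pip install autoawq`), which reads these settings from quant_config.json automatically; the repo id is assumed:

```python
# Sketch, assuming repo id TheBloke/stockmark-13b-AWQ.
from awq import AutoAWQForCausalLM
from transformers import AutoTokenizer

model_path = "TheBloke/stockmark-13b-AWQ"
model = AutoAWQForCausalLM.from_quantized(model_path,
                                          fuse_layers=True,
                                          safetensors=True)
tokenizer = AutoTokenizer.from_pretrained(model_path)
```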
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "bos_token": "<s>",
+ "eos_token": "</s>",
+ "pad_token": "<pad>",
+ "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "<pad>",
+ "padding_side": "right",
+ "tokenizer_class": "LlamaTokenizerFast",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": true
+ }
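tokenizer_config.json selects LlamaTokenizerFast with right-side padding; the huge model_max_length is the transformers sentinel for "no explicit limit" (1e30 rendered as an integer). Together with special_tokens_map.json above, it wires up `<s>`, `</s>`, `<pad>`, and `<unk>`. A sketch to confirm, same assumed repo id:

```python
# Sketch, assuming repo id TheBloke/stockmark-13b-AWQ.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("TheBloke/stockmark-13b-AWQ")
print(type(tokenizer).__name__)    # -> LlamaTokenizerFast
print(tokenizer.bos_token, tokenizer.eos_token,
      tokenizer.pad_token, tokenizer.unk_token)   # -> <s> </s> <pad> <unk>
print(tokenizer.padding_side)      # -> right
```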