pankajroark committed (verified)
Commit cb60fd6 · 1 parent: 8dce473

Upload folder using huggingface_hub

Files changed (4):
  1. .gitattributes +2 -0
  2. config.json +213 -0
  3. part_aa +3 -0
  4. part_ab +3 -0
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ part_aa filter=lfs diff=lfs merge=lfs -text
+ part_ab filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,213 @@
+ {
+ "producer": {
+ "name": "modelopt",
+ "version": "0.19.0"
+ },
+ "architecture": "LlamaForCausalLM",
+ "dtype": "float16",
+ "logits_dtype": "float16",
+ "num_hidden_layers": 80,
+ "num_attention_heads": 64,
+ "num_key_value_heads": 8,
+ "hidden_size": 8192,
+ "norm_epsilon": 1e-05,
+ "vocab_size": 128256,
+ "max_position_embeddings": 131072,
+ "hidden_act": "silu",
+ "use_parallel_embedding": true,
+ "embedding_sharding_dim": 0,
+ "quantization": {
+ "quant_algo": "FP8",
+ "kv_cache_quant_algo": "FP8",
+ "exclude_modules": [
+ "transformer.layers.64.input_layernorm",
+ "transformer.layers.20.input_layernorm",
+ "transformer.layers.13.post_layernorm",
+ "transformer.layers.14.input_layernorm",
+ "transformer.layers.63.post_layernorm",
+ "transformer.layers.8.input_layernorm",
+ "transformer.layers.48.post_layernorm",
+ "transformer.layers.34.input_layernorm",
+ "transformer.layers.22.input_layernorm",
+ "transformer.layers.6.input_layernorm",
+ "transformer.vocab_embedding",
+ "transformer.layers.7.input_layernorm",
+ "transformer.layers.18.post_layernorm",
+ "transformer.layers.71.post_layernorm",
+ "transformer.layers.6.post_layernorm",
+ "transformer.layers.53.post_layernorm",
+ "transformer.layers.22.post_layernorm",
+ "transformer.layers.28.post_layernorm",
+ "transformer.layers.35.post_layernorm",
+ "transformer.layers.70.post_layernorm",
+ "transformer.layers.1.post_layernorm",
+ "transformer.layers.46.input_layernorm",
+ "transformer.layers.32.post_layernorm",
+ "transformer.layers.55.post_layernorm",
+ "transformer.layers.11.post_layernorm",
+ "transformer.layers.73.input_layernorm",
+ "transformer.layers.68.post_layernorm",
+ "transformer.layers.10.post_layernorm",
+ "transformer.layers.33.input_layernorm",
+ "transformer.layers.55.input_layernorm",
+ "transformer.layers.19.post_layernorm",
+ "transformer.layers.61.input_layernorm",
+ "transformer.layers.14.post_layernorm",
+ "transformer.layers.51.post_layernorm",
+ "transformer.layers.9.post_layernorm",
+ "transformer.layers.7.post_layernorm",
+ "transformer.layers.1.input_layernorm",
+ "transformer.layers.57.input_layernorm",
+ "transformer.layers.46.post_layernorm",
+ "transformer.layers.27.input_layernorm",
+ "transformer.layers.66.post_layernorm",
+ "transformer.layers.41.post_layernorm",
+ "transformer.layers.23.input_layernorm",
+ "transformer.layers.51.input_layernorm",
+ "transformer.layers.39.input_layernorm",
+ "transformer.layers.3.post_layernorm",
+ "transformer.layers.10.input_layernorm",
+ "transformer.layers.62.post_layernorm",
+ "transformer.layers.66.input_layernorm",
+ "lm_head",
+ "transformer.layers.41.input_layernorm",
+ "transformer.layers.45.post_layernorm",
+ "transformer.layers.30.input_layernorm",
+ "transformer.layers.37.input_layernorm",
+ "transformer.layers.54.post_layernorm",
+ "transformer.layers.77.post_layernorm",
+ "transformer.layers.58.post_layernorm",
+ "transformer.layers.4.input_layernorm",
+ "transformer.layers.54.input_layernorm",
+ "transformer.layers.56.post_layernorm",
+ "transformer.layers.61.post_layernorm",
+ "transformer.layers.52.post_layernorm",
+ "transformer.layers.48.input_layernorm",
+ "transformer.layers.49.input_layernorm",
+ "transformer.layers.77.input_layernorm",
+ "transformer.layers.2.input_layernorm",
+ "transformer.layers.53.input_layernorm",
+ "transformer.layers.79.post_layernorm",
+ "transformer.layers.45.input_layernorm",
+ "transformer.layers.19.input_layernorm",
+ "transformer.layers.31.input_layernorm",
+ "transformer.layers.56.input_layernorm",
+ "transformer.layers.11.input_layernorm",
+ "transformer.layers.15.input_layernorm",
+ "transformer.layers.52.input_layernorm",
+ "transformer.layers.16.input_layernorm",
+ "transformer.layers.30.post_layernorm",
+ "transformer.layers.21.post_layernorm",
+ "transformer.layers.4.post_layernorm",
+ "transformer.layers.17.input_layernorm",
+ "transformer.layers.0.post_layernorm",
+ "transformer.layers.60.post_layernorm",
+ "transformer.layers.59.input_layernorm",
+ "transformer.layers.44.post_layernorm",
+ "transformer.layers.49.post_layernorm",
+ "transformer.layers.70.input_layernorm",
+ "transformer.layers.78.post_layernorm",
+ "transformer.layers.23.post_layernorm",
+ "transformer.layers.42.input_layernorm",
+ "transformer.layers.32.input_layernorm",
+ "transformer.layers.18.input_layernorm",
+ "transformer.layers.25.input_layernorm",
+ "transformer.layers.39.post_layernorm",
+ "transformer.layers.59.post_layernorm",
+ "transformer.layers.74.input_layernorm",
+ "transformer.layers.2.post_layernorm",
+ "transformer.layers.67.input_layernorm",
+ "transformer.layers.44.input_layernorm",
+ "transformer.layers.13.input_layernorm",
+ "transformer.layers.74.post_layernorm",
+ "transformer.layers.76.input_layernorm",
+ "transformer.layers.63.input_layernorm",
+ "transformer.layers.79.input_layernorm",
+ "transformer.layers.65.input_layernorm",
+ "transformer.layers.26.post_layernorm",
+ "transformer.layers.40.input_layernorm",
+ "transformer.layers.67.post_layernorm",
+ "transformer.layers.36.post_layernorm",
+ "transformer.layers.37.post_layernorm",
+ "transformer.layers.47.input_layernorm",
+ "transformer.layers.64.post_layernorm",
+ "transformer.layers.29.input_layernorm",
+ "transformer.layers.60.input_layernorm",
+ "transformer.layers.16.post_layernorm",
+ "transformer.layers.20.post_layernorm",
+ "transformer.layers.62.input_layernorm",
+ "transformer.layers.69.input_layernorm",
+ "transformer.layers.26.input_layernorm",
+ "transformer.layers.8.post_layernorm",
+ "transformer.layers.50.post_layernorm",
+ "transformer.layers.17.post_layernorm",
+ "transformer.layers.5.input_layernorm",
+ "transformer.layers.43.input_layernorm",
+ "transformer.layers.58.input_layernorm",
+ "transformer.layers.0.input_layernorm",
+ "transformer.layers.75.input_layernorm",
+ "transformer.layers.29.post_layernorm",
+ "transformer.layers.47.post_layernorm",
+ "transformer.layers.31.post_layernorm",
+ "transformer.layers.38.post_layernorm",
+ "transformer.layers.15.post_layernorm",
+ "transformer.layers.40.post_layernorm",
+ "transformer.layers.5.post_layernorm",
+ "transformer.layers.76.post_layernorm",
+ "transformer.layers.65.post_layernorm",
+ "transformer.layers.42.post_layernorm",
+ "transformer.layers.69.post_layernorm",
+ "transformer.layers.9.input_layernorm",
+ "transformer.layers.71.input_layernorm",
+ "transformer.layers.75.post_layernorm",
+ "transformer.layers.12.post_layernorm",
+ "transformer.ln_f",
+ "transformer.layers.12.input_layernorm",
+ "transformer.layers.3.input_layernorm",
+ "transformer.layers.72.post_layernorm",
+ "transformer.layers.24.post_layernorm",
+ "transformer.layers.50.input_layernorm",
+ "transformer.layers.21.input_layernorm",
+ "transformer.layers.68.input_layernorm",
+ "transformer.layers.78.input_layernorm",
+ "transformer.layers.73.post_layernorm",
+ "transformer.layers.35.input_layernorm",
+ "transformer.layers.43.post_layernorm",
+ "transformer.layers.33.post_layernorm",
+ "transformer.layers.34.post_layernorm",
+ "transformer.layers.24.input_layernorm",
+ "transformer.layers.27.post_layernorm",
+ "transformer.layers.72.input_layernorm",
+ "transformer.layers.38.input_layernorm",
+ "transformer.layers.57.post_layernorm",
+ "transformer.layers.25.post_layernorm",
+ "transformer.layers.36.input_layernorm",
+ "transformer.layers.28.input_layernorm"
+ ]
+ },
+ "mapping": {
+ "world_size": 1,
+ "tp_size": 1,
+ "pp_size": 1
+ },
+ "head_size": 128,
+ "intermediate_size": 28672,
+ "position_embedding_type": "rope_gpt_neox",
+ "share_embedding_table": false,
+ "residual_mlp": false,
+ "bias": false,
+ "rotary_pct": 1.0,
+ "rank": 0,
+ "decoder": "llama",
+ "rmsnorm": true,
+ "lm_head_bias": false,
+ "rotary_base": 500000.0,
+ "rotary_scaling": {
+ "factor": 8.0,
+ "low_freq_factor": 1.0,
+ "high_freq_factor": 4.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "model_type": "llama"
+ }
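Note: the keys above (producer "modelopt", "quant_algo": "FP8", "mapping" with tp/pp sizes) look like a TensorRT-LLM checkpoint config emitted by NVIDIA ModelOpt; that interpretation is an inference from the diff, not stated in the commit. A minimal sketch for inspecting the key fields after download, assuming config.json is read from the working directory:

```python
import json

# Minimal sketch: inspect key fields of the uploaded config.json
# (values match the diff above; the local path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

print(cfg["architecture"])                              # LlamaForCausalLM
print(cfg["dtype"], cfg["quantization"]["quant_algo"])  # float16 FP8
print(cfg["quantization"]["kv_cache_quant_algo"])       # FP8
print(len(cfg["quantization"]["exclude_modules"]))      # layernorms, embeddings, lm_head left unquantized
print(cfg["mapping"])                                   # {'world_size': 1, 'tp_size': 1, 'pp_size': 1}
```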
part_aa ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d176a7066241d2f662bf7052501126ebd5db5b0b8acc907a5fa984373f7b153
+ size 37580963840
part_ab ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:854d3645a1693da94f439a06a863a75674dfb5f8486ede6fef90f02e013a12cd
+ size 35075579704
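The part_aa / part_ab naming suggests one large binary split into chunks (roughly 37.6 GB and 35.1 GB per the LFS pointers above), presumably to stay under per-file size limits; the commit does not say what the reassembled file is called. A minimal sketch for concatenating them after download, assuming they join in lexical order and using a hypothetical output name:

```python
# Minimal sketch: reassemble the split binary from its parts.
# "model.bin" is a hypothetical output name; the real target
# filename is not recorded anywhere in this commit.
parts = ["part_aa", "part_ab"]

with open("model.bin", "wb") as out:
    for part in parts:
        with open(part, "rb") as f:
            # Stream in 64 MiB chunks to avoid loading ~70 GB into memory.
            while chunk := f.read(64 * 1024 * 1024):
                out.write(chunk)
```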