End of training

Files changed:
- README.md +263 -0
- config.json +29 -0
- generation_config.json +6 -0
- model-00001-of-00003.safetensors +3 -0
- model-00002-of-00003.safetensors +3 -0
- model-00003-of-00003.safetensors +3 -0
- model.safetensors.index.json +298 -0
README.md
ADDED
@@ -0,0 +1,263 @@
---
license: apache-2.0
base_model: mistralai/Mistral-7B-Instruct-v0.1
tags:
- generated_from_trainer
datasets:
- openwebtext
model-index:
- name: Mistral_Sparse_pretraining_80_percent_10000
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# Mistral_Sparse_pretraining_80_percent_10000

This model is a fine-tuned version of [mistralai/Mistral-7B-Instruct-v0.1](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1) on the openwebtext dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6872

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 8
- eval_batch_size: 32
- seed: 0
- distributed_type: multi-GPU
- num_devices: 6
- gradient_accumulation_steps: 2
- total_train_batch_size: 96
- total_eval_batch_size: 192
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- training_steps: 10000
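
For reference, the two totals follow directly from the per-device values: total_train_batch_size = train_batch_size × num_devices × gradient_accumulation_steps = 8 × 6 × 2 = 96, and total_eval_batch_size = eval_batch_size × num_devices = 32 × 6 = 192.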

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:-----:|:---------------:|
| 1.7461 | 0.05 | 50 | 1.7009 |
| 1.4034 | 0.1 | 100 | 1.3910 |
| 1.2302 | 0.15 | 150 | 1.2330 |
| 1.1363 | 0.19 | 200 | 1.1354 |
| 1.0699 | 0.24 | 250 | 1.0723 |
| 1.0316 | 0.29 | 300 | 1.0284 |
| 1.0044 | 0.34 | 350 | 0.9943 |
| 0.9719 | 0.39 | 400 | 0.9668 |
| 0.9391 | 0.44 | 450 | 0.9430 |
| 0.9194 | 0.48 | 500 | 0.9249 |
| 0.9131 | 0.53 | 550 | 0.9092 |
| 0.877 | 0.58 | 600 | 0.8953 |
| 0.8757 | 0.63 | 650 | 0.8852 |
| 0.8644 | 0.68 | 700 | 0.8749 |
| 0.8625 | 0.73 | 750 | 0.8679 |
| 0.867 | 0.78 | 800 | 0.8594 |
| 0.852 | 0.82 | 850 | 0.8529 |
| 0.8482 | 0.87 | 900 | 0.8473 |
| 0.8372 | 0.92 | 950 | 0.8421 |
| 0.8391 | 0.97 | 1000 | 0.8366 |
| 0.8209 | 1.02 | 1050 | 0.8327 |
| 0.8172 | 1.07 | 1100 | 0.8275 |
| 0.8094 | 1.11 | 1150 | 0.8247 |
| 0.8107 | 1.16 | 1200 | 0.8210 |
| 0.8137 | 1.21 | 1250 | 0.8168 |
| 0.8122 | 1.26 | 1300 | 0.8143 |
| 0.8047 | 1.31 | 1350 | 0.8115 |
| 0.804 | 1.36 | 1400 | 0.8083 |
| 0.7955 | 1.41 | 1450 | 0.8062 |
| 0.7939 | 1.45 | 1500 | 0.8040 |
| 0.7835 | 1.5 | 1550 | 0.8019 |
| 0.7983 | 1.55 | 1600 | 0.8001 |
| 0.7953 | 1.6 | 1650 | 0.7975 |
| 0.7903 | 1.65 | 1700 | 0.7945 |
| 0.7864 | 1.7 | 1750 | 0.7938 |
| 0.7972 | 1.75 | 1800 | 0.7914 |
| 0.7855 | 1.79 | 1850 | 0.7905 |
| 0.7834 | 1.84 | 1900 | 0.7878 |
| 0.7812 | 1.89 | 1950 | 0.7854 |
| 0.7865 | 1.94 | 2000 | 0.7847 |
| 0.7875 | 1.99 | 2050 | 0.7837 |
| 0.7764 | 2.04 | 2100 | 0.7815 |
| 0.7676 | 2.08 | 2150 | 0.7807 |
| 0.7716 | 2.13 | 2200 | 0.7796 |
| 0.777 | 2.18 | 2250 | 0.7781 |
| 0.7706 | 2.23 | 2300 | 0.7769 |
| 0.7669 | 2.28 | 2350 | 0.7748 |
| 0.771 | 2.33 | 2400 | 0.7742 |
| 0.7501 | 2.38 | 2450 | 0.7728 |
| 0.7653 | 2.42 | 2500 | 0.7713 |
| 0.7715 | 2.47 | 2550 | 0.7699 |
| 0.7588 | 2.52 | 2600 | 0.7694 |
| 0.7665 | 2.57 | 2650 | 0.7676 |
| 0.7616 | 2.62 | 2700 | 0.7658 |
| 0.7597 | 2.67 | 2750 | 0.7654 |
| 0.756 | 2.71 | 2800 | 0.7644 |
| 0.7517 | 2.76 | 2850 | 0.7628 |
| 0.7561 | 2.81 | 2900 | 0.7628 |
| 0.7413 | 2.86 | 2950 | 0.7620 |
| 0.7545 | 2.91 | 3000 | 0.7603 |
| 0.7442 | 2.96 | 3050 | 0.7592 |
| 0.7454 | 3.01 | 3100 | 0.7589 |
| 0.7575 | 3.05 | 3150 | 0.7583 |
| 0.739 | 3.1 | 3200 | 0.7571 |
| 0.7446 | 3.15 | 3250 | 0.7558 |
| 0.7428 | 3.2 | 3300 | 0.7557 |
| 0.737 | 3.25 | 3350 | 0.7553 |
| 0.7512 | 3.3 | 3400 | 0.7536 |
| 0.7447 | 3.34 | 3450 | 0.7525 |
| 0.7417 | 3.39 | 3500 | 0.7525 |
| 0.7403 | 3.44 | 3550 | 0.7512 |
| 0.761 | 3.49 | 3600 | 0.7502 |
| 0.7475 | 3.54 | 3650 | 0.7498 |
| 0.7535 | 3.59 | 3700 | 0.7486 |
| 0.733 | 3.64 | 3750 | 0.7483 |
| 0.7347 | 3.68 | 3800 | 0.7470 |
| 0.7439 | 3.73 | 3850 | 0.7470 |
| 0.7417 | 3.78 | 3900 | 0.7460 |
| 0.7383 | 3.83 | 3950 | 0.7460 |
| 0.7316 | 3.88 | 4000 | 0.7450 |
| 0.7273 | 3.93 | 4050 | 0.7442 |
| 0.7376 | 3.97 | 4100 | 0.7440 |
| 0.73 | 4.02 | 4150 | 0.7424 |
| 0.732 | 4.07 | 4200 | 0.7429 |
| 0.7278 | 4.12 | 4250 | 0.7419 |
| 0.721 | 4.17 | 4300 | 0.7416 |
| 0.7309 | 4.22 | 4350 | 0.7410 |
| 0.7273 | 4.27 | 4400 | 0.7400 |
| 0.7297 | 4.31 | 4450 | 0.7395 |
| 0.7321 | 4.36 | 4500 | 0.7385 |
| 0.7348 | 4.41 | 4550 | 0.7381 |
| 0.7251 | 4.46 | 4600 | 0.7371 |
| 0.7175 | 4.51 | 4650 | 0.7372 |
| 0.7356 | 4.56 | 4700 | 0.7368 |
| 0.7306 | 4.6 | 4750 | 0.7363 |
| 0.7248 | 4.65 | 4800 | 0.7359 |
| 0.7266 | 4.7 | 4850 | 0.7343 |
| 0.7243 | 4.75 | 4900 | 0.7349 |
| 0.7256 | 4.8 | 4950 | 0.7338 |
| 0.7301 | 4.85 | 5000 | 0.7335 |
| 0.7266 | 4.9 | 5050 | 0.7327 |
| 0.7229 | 4.94 | 5100 | 0.7321 |
| 0.7355 | 4.99 | 5150 | 0.7315 |
| 0.7207 | 5.04 | 5200 | 0.7317 |
| 0.7157 | 5.09 | 5250 | 0.7314 |
| 0.7214 | 5.14 | 5300 | 0.7299 |
| 0.7104 | 5.19 | 5350 | 0.7304 |
| 0.7059 | 5.24 | 5400 | 0.7296 |
| 0.7181 | 5.28 | 5450 | 0.7295 |
| 0.7226 | 5.33 | 5500 | 0.7286 |
| 0.7077 | 5.38 | 5550 | 0.7282 |
| 0.7239 | 5.43 | 5600 | 0.7276 |
| 0.7159 | 5.48 | 5650 | 0.7277 |
| 0.7169 | 5.53 | 5700 | 0.7271 |
| 0.7101 | 5.57 | 5750 | 0.7269 |
| 0.7146 | 5.62 | 5800 | 0.7262 |
| 0.7191 | 5.67 | 5850 | 0.7265 |
| 0.7124 | 5.72 | 5900 | 0.7248 |
| 0.7085 | 5.77 | 5950 | 0.7238 |
| 0.7052 | 5.82 | 6000 | 0.7235 |
| 0.7222 | 5.87 | 6050 | 0.7222 |
| 0.7089 | 5.91 | 6100 | 0.7221 |
| 0.7088 | 5.96 | 6150 | 0.7222 |
| 0.7017 | 6.01 | 6200 | 0.7218 |
| 0.7079 | 6.06 | 6250 | 0.7218 |
| 0.7209 | 6.11 | 6300 | 0.7211 |
| 0.691 | 6.16 | 6350 | 0.7210 |
| 0.7035 | 6.2 | 6400 | 0.7203 |
| 0.7075 | 6.25 | 6450 | 0.7207 |
| 0.7036 | 6.3 | 6500 | 0.7200 |
| 0.7023 | 6.35 | 6550 | 0.7189 |
| 0.7201 | 6.4 | 6600 | 0.7192 |
| 0.7021 | 6.45 | 6650 | 0.7188 |
| 0.6971 | 6.5 | 6700 | 0.7174 |
| 0.7087 | 6.54 | 6750 | 0.7184 |
| 0.7044 | 6.59 | 6800 | 0.7176 |
| 0.6921 | 6.64 | 6850 | 0.7179 |
| 0.7079 | 6.69 | 6900 | 0.7166 |
| 0.6908 | 6.74 | 6950 | 0.7158 |
| 0.687 | 6.79 | 7000 | 0.7158 |
| 0.696 | 6.83 | 7050 | 0.7148 |
| 0.6954 | 6.88 | 7100 | 0.7152 |
| 0.7103 | 6.93 | 7150 | 0.7143 |
| 0.6999 | 6.98 | 7200 | 0.7140 |
| 0.699 | 7.03 | 7250 | 0.7138 |
| 0.6959 | 7.08 | 7300 | 0.7138 |
| 0.6871 | 7.13 | 7350 | 0.7122 |
| 0.6941 | 7.17 | 7400 | 0.7131 |
| 0.6931 | 7.22 | 7450 | 0.7132 |
| 0.707 | 7.27 | 7500 | 0.7110 |
| 0.6911 | 7.32 | 7550 | 0.7122 |
| 0.7036 | 7.37 | 7600 | 0.7113 |
| 0.7105 | 7.42 | 7650 | 0.7107 |
| 0.7035 | 7.46 | 7700 | 0.7108 |
| 0.6901 | 7.51 | 7750 | 0.7113 |
| 0.6944 | 7.56 | 7800 | 0.7096 |
| 0.6927 | 7.61 | 7850 | 0.7093 |
| 0.7052 | 7.66 | 7900 | 0.7090 |
| 0.7046 | 7.71 | 7950 | 0.7082 |
| 0.6949 | 7.76 | 8000 | 0.7082 |
| 0.6888 | 7.8 | 8050 | 0.7071 |
| 0.6916 | 7.85 | 8100 | 0.7071 |
| 0.6937 | 7.9 | 8150 | 0.7067 |
| 0.7077 | 7.95 | 8200 | 0.7066 |
| 0.6847 | 8.0 | 8250 | 0.7057 |
| 0.6908 | 8.05 | 8300 | 0.7056 |
| 0.6813 | 8.1 | 8350 | 0.7060 |
| 0.6756 | 8.14 | 8400 | 0.7055 |
| 0.7006 | 8.19 | 8450 | 0.7052 |
| 0.6842 | 8.24 | 8500 | 0.7035 |
| 0.6851 | 8.29 | 8550 | 0.7044 |
| 0.6944 | 8.34 | 8600 | 0.7042 |
| 0.6929 | 8.39 | 8650 | 0.7040 |
| 0.6924 | 8.43 | 8700 | 0.7037 |
| 0.6843 | 8.48 | 8750 | 0.7037 |
| 0.7005 | 8.53 | 8800 | 0.7028 |
| 0.6795 | 8.58 | 8850 | 0.7022 |
| 0.6946 | 8.63 | 8900 | 0.7019 |
| 0.6761 | 8.68 | 8950 | 0.7016 |
| 0.6817 | 8.73 | 9000 | 0.7012 |
| 0.6838 | 8.77 | 9050 | 0.7012 |
| 0.6877 | 8.82 | 9100 | 0.7006 |
| 0.6812 | 8.87 | 9150 | 0.7004 |
| 0.6966 | 8.92 | 9200 | 0.7005 |
| 0.6778 | 8.97 | 9250 | 0.6993 |
| 0.6844 | 9.02 | 9300 | 0.6991 |
| 0.6853 | 9.06 | 9350 | 0.7000 |
| 0.6839 | 9.11 | 9400 | 0.6998 |
| 0.6813 | 9.16 | 9450 | 0.6984 |
| 0.6903 | 9.21 | 9500 | 0.6985 |
| 0.6819 | 9.26 | 9550 | 0.6987 |
| 0.6749 | 9.31 | 9600 | 0.6980 |
| 0.6782 | 9.36 | 9650 | 0.6979 |
| 0.6805 | 9.4 | 9700 | 0.6975 |
| 0.6907 | 9.45 | 9750 | 0.6974 |
| 0.6854 | 9.5 | 9800 | 0.6967 |
| 0.6803 | 9.55 | 9850 | 0.6969 |
| 0.6854 | 9.6 | 9900 | 0.6964 |
| 0.6761 | 9.65 | 9950 | 0.6966 |
| 0.6939 | 9.69 | 10000 | 0.6959 |


### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.1+cu121
- Datasets 2.15.0
- Tokenizers 0.15.0
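
A loading note (an editor's sketch, not part of the generated card): the weight names in model.safetensors.index.json below carry a `base_model.model.` prefix, which suggests the checkpoint was saved from a wrapped model, so a plain `from_pretrained` on these shards may not match the stock Mistral key names. The sketch below strips that prefix manually; the repo id is a placeholder and the tokenizer source is an assumption (this commit contains no tokenizer files).

```python
# Minimal loading sketch, under the assumptions stated above.
# "your-org/Mistral_Sparse_pretraining_80_percent_10000" is a placeholder repo id.
import torch
from pathlib import Path
from huggingface_hub import snapshot_download
from safetensors.torch import load_file
from transformers import AutoConfig, AutoTokenizer, MistralForCausalLM

local = Path(snapshot_download("your-org/Mistral_Sparse_pretraining_80_percent_10000"))

config = AutoConfig.from_pretrained(local)             # model_type is "mistral"
model = MistralForCausalLM(config).to(torch.bfloat16)  # match the bf16 shards

# Merge the three shards, dropping the "base_model.model." prefix so the
# keys line up with MistralForCausalLM's state dict.
state_dict = {}
for shard in sorted(local.glob("model-*-of-00003.safetensors")):
    for key, tensor in load_file(shard).items():
        state_dict[key.removeprefix("base_model.model.")] = tensor

# strict=False tolerates non-persistent buffers (e.g. rotary inv_freq);
# inspect the returned lists to make sure no real weights were missed.
missing, unexpected = model.load_state_dict(state_dict, strict=False)
print("missing:", missing, "unexpected:", unexpected)

# Tokenizer files are not in this commit; fall back to the base model's.
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
```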
config.json
ADDED
@@ -0,0 +1,29 @@
{
  "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.1",
  "architectures": [
    "SparseMistral"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 131072,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "regularization_coefficient": 10,
  "regularization_type": "L1 Regularization",
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.35.2",
  "use_cache": true,
  "use_sparse_model": false,
  "use_sparse_regularization": false,
  "vocab_size": 32000
}
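
The `regularization_*` and `use_sparse_*` entries are not standard MistralConfig fields; they presumably come from the custom SparseMistral training code. A small inspection sketch (assuming config.json has been saved to the current directory): stock transformers resolves this file to MistralConfig via `model_type` and keeps the unknown keys as plain attributes.

```python
# Inspect the config above with stock transformers (a sketch, not the authors' code).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # directory containing config.json
print(type(config).__name__)              # MistralConfig ("model_type": "mistral")
print(config.hidden_size, config.num_hidden_layers)  # 4096 32
# Custom keys survive as attributes even though MistralConfig doesn't define them:
print(config.use_sparse_model, config.regularization_type)
```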
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "transformers_version": "4.35.2"
}
model-00001-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8b0e6b647518c824c5801de217f27c5f1e1c1d535d86be8b4e7bf2fba8300f53
size 4943163992
model-00002-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71e708ea67da606a7d8143eb9b2a7159d9871de307c8a408e0d94823251791c8
size 4999821144
model-00003-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c60f96fc3a7c401b5f1c1685f4d08ecef39d55fa265743d55b91bddc95f7e725
size 4540517840
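
The three `.safetensors` entries above are Git LFS pointer files: the repository tracks only a sha256 oid and a byte size, while the actual shards live in LFS storage. A minimal integrity check after downloading the real shards (a sketch; assumes the files sit in the current directory, stdlib only):

```python
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so multi-GB shards don't need to fit in RAM."""
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid and size values copied from the LFS pointers above.
expected = {
    "model-00001-of-00003.safetensors":
        ("8b0e6b647518c824c5801de217f27c5f1e1c1d535d86be8b4e7bf2fba8300f53", 4943163992),
    "model-00002-of-00003.safetensors":
        ("71e708ea67da606a7d8143eb9b2a7159d9871de307c8a408e0d94823251791c8", 4999821144),
    "model-00003-of-00003.safetensors":
        ("c60f96fc3a7c401b5f1c1685f4d08ecef39d55fa265743d55b91bddc95f7e725", 4540517840),
}
for name, (oid, size) in expected.items():
    p = Path(name)
    ok = p.stat().st_size == size and sha256_of(p) == oid
    print(name, "OK" if ok else "MISMATCH")
```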
model.safetensors.index.json
ADDED
@@ -0,0 +1,298 @@
{
  "metadata": {
    "total_size": 14483464192
  },
  "weight_map": {
    "base_model.model.lm_head.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.embed_tokens.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "base_model.model.model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "base_model.model.model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "base_model.model.model.norm.weight": "model-00003-of-00003.safetensors"
  }
}
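
The index maps every tensor name to the shard that holds it; `metadata.total_size` is the combined tensor payload in bytes (14,483,464,192 ≈ 14.5 GB, consistent with a ~7.24B-parameter model in bfloat16). A short inspection sketch (stdlib only; assumes the file sits in the current directory):

```python
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print("total_size:", index["metadata"]["total_size"])  # 14483464192 bytes

# Tensor count per shard:
for shard, n in sorted(Counter(index["weight_map"].values()).items()):
    print(shard, n, "tensors")

# Every key starts with "base_model.model." — a wrapped-model save, which is
# why plain from_pretrained key matching may fail (see the note after the README).
print(all(k.startswith("base_model.model.") for k in index["weight_map"]))
```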