{
  "metadata": {
    "total_size": 28992094208
  },
  "weight_map": {
    "tf_mistral_for_causal_lm/lm_head/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/embed_tokens/embeddings:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.0/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.1/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.10/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.11/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.12/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.13/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.14/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.15/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.16/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.17/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.18/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.19/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.2/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.20/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/input_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/mlp/down_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/mlp/gate_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/mlp/up_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/post_attention_layernorm/weight:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.21/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/self_attn/k_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/self_attn/o_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/self_attn/q_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.22/self_attn/v_proj/kernel:0": "tf_model-00002-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.23/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.24/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.25/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.26/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.27/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.28/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.29/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.3/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.30/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/input_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/mlp/down_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/mlp/gate_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/mlp/up_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/post_attention_layernorm/weight:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/self_attn/k_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/self_attn/o_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/self_attn/q_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.31/self_attn/v_proj/kernel:0": "tf_model-00003-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.4/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.5/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.6/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.7/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.8/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/input_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/mlp/down_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/mlp/gate_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/mlp/up_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/post_attention_layernorm/weight:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/self_attn/k_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/self_attn/o_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/self_attn/q_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/layers.9/self_attn/v_proj/kernel:0": "tf_model-00001-of-00003.h5",
    "tf_mistral_for_causal_lm/model/norm/weight:0": "tf_model-00003-of-00003.h5"
  }
}