Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
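For context, a minimal sketch of how a commit like this one is typically produced with `huggingface_hub` (the local folder path and repo id below are hypothetical):

```python
from huggingface_hub import HfApi

api = HfApi()

# Upload every file in a local folder as a single commit; large binaries
# such as *.safetensors are stored through Git LFS automatically.
api.upload_folder(
    folder_path="./zelus",          # hypothetical local merge output
    repo_id="your-username/zelus",  # hypothetical target repository
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```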
- README.md +52 -0
- config.json +30 -0
- mergekit_config.yml +20 -0
- merges.txt +0 -0
- model-00001-of-00043.safetensors +3 -0
- model-00002-of-00043.safetensors +3 -0
- model-00003-of-00043.safetensors +3 -0
- model-00004-of-00043.safetensors +3 -0
- model-00005-of-00043.safetensors +3 -0
- model-00006-of-00043.safetensors +3 -0
- model-00007-of-00043.safetensors +3 -0
- model-00008-of-00043.safetensors +3 -0
- model-00009-of-00043.safetensors +3 -0
- model-00010-of-00043.safetensors +3 -0
- model-00011-of-00043.safetensors +3 -0
- model-00012-of-00043.safetensors +3 -0
- model-00013-of-00043.safetensors +3 -0
- model-00014-of-00043.safetensors +3 -0
- model-00015-of-00043.safetensors +3 -0
- model-00016-of-00043.safetensors +3 -0
- model-00017-of-00043.safetensors +3 -0
- model-00018-of-00043.safetensors +3 -0
- model-00019-of-00043.safetensors +3 -0
- model-00020-of-00043.safetensors +3 -0
- model-00021-of-00043.safetensors +3 -0
- model-00022-of-00043.safetensors +3 -0
- model-00023-of-00043.safetensors +3 -0
- model-00024-of-00043.safetensors +3 -0
- model-00025-of-00043.safetensors +3 -0
- model-00026-of-00043.safetensors +3 -0
- model-00027-of-00043.safetensors +3 -0
- model-00028-of-00043.safetensors +3 -0
- model-00029-of-00043.safetensors +3 -0
- model-00030-of-00043.safetensors +3 -0
- model-00031-of-00043.safetensors +3 -0
- model-00032-of-00043.safetensors +3 -0
- model-00033-of-00043.safetensors +3 -0
- model-00034-of-00043.safetensors +3 -0
- model-00035-of-00043.safetensors +3 -0
- model-00036-of-00043.safetensors +3 -0
- model-00037-of-00043.safetensors +3 -0
- model-00038-of-00043.safetensors +3 -0
- model-00039-of-00043.safetensors +3 -0
- model-00040-of-00043.safetensors +3 -0
- model-00041-of-00043.safetensors +3 -0
- model-00042-of-00043.safetensors +3 -0
- model-00043-of-00043.safetensors +3 -0
- model.safetensors.index.json +1 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
README.md
ADDED
@@ -0,0 +1,52 @@
---
base_model:
- migtissera/Tess-72B-v1.5b
- davidkim205/Rhea-72b-v0.5
library_name: transformers
tags:
- mergekit
- merge

---
# zelus

This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).

## Merge Details
### Merge Method

This model was merged using the SLERP merge method.

### Models Merged

The following models were included in the merge:
* [migtissera/Tess-72B-v1.5b](https://huggingface.co/migtissera/Tess-72B-v1.5b)
* [davidkim205/Rhea-72b-v0.5](https://huggingface.co/davidkim205/Rhea-72b-v0.5)

### Configuration

The following YAML configuration was used to produce this model:

```yaml
slices:
  - sources:
      - model: davidkim205/Rhea-72b-v0.5
        layer_range: [0, 40]
      - model: migtissera/Tess-72B-v1.5b
        layer_range: [0, 40]
# or, the equivalent models: syntax:
# models:
#   - model: psmathur/orca_mini_v3_13b
#   - model: garage-bAInd/Platypus2-13B
merge_method: slerp
base_model: davidkim205/Rhea-72b-v0.5
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5 # fallback for rest of tensors
dtype: float16

```
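The card above names SLERP without defining it, so here is a hedged illustration of the idea: spherical linear interpolation between two weight tensors, treated as flattened vectors. This is a minimal sketch of the general formula, not mergekit's exact implementation:

```python
import torch

def slerp(t: float, a: torch.Tensor, b: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Spherically interpolate between tensors a and b at fraction t in [0, 1]."""
    a_flat, b_flat = a.flatten().float(), b.flatten().float()
    a_unit = a_flat / (a_flat.norm() + eps)
    b_unit = b_flat / (b_flat.norm() + eps)
    omega = torch.arccos(torch.clamp(a_unit @ b_unit, -1.0, 1.0))  # angle between weights
    so = torch.sin(omega)
    if so.abs() < eps:
        # Nearly parallel vectors: fall back to plain linear interpolation.
        mixed = (1.0 - t) * a_flat + t * b_flat
    else:
        mixed = (torch.sin((1.0 - t) * omega) / so) * a_flat + (torch.sin(t * omega) / so) * b_flat
    return mixed.reshape(a.shape).to(a.dtype)
```

At t=0 this returns the first model's tensor and at t=1 the second's, which is how the `t` schedules in the configuration above steer each layer between the two parents.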
config.json
ADDED
@@ -0,0 +1,30 @@
{
  "_name_or_path": "davidkim205/Rhea-72b-v0.5",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": true,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 151643,
  "hidden_act": "silu",
  "hidden_size": 8192,
  "initializer_range": 0.02,
  "intermediate_size": 24576,
  "max_position_embeddings": 32768,
  "model_type": "llama",
  "num_attention_heads": 64,
  "num_hidden_layers": 40,
  "num_key_value_heads": 64,
  "pad_token_id": 151643,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000,
  "seq_length": 32768,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.39.1",
  "use_cache": true,
  "vocab_size": 152064
}
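One thing worth noticing in this config: `num_hidden_layers` is 40, matching the `layer_range: [0, 40]` slice, so despite the "72b" parent names the merged model is roughly half that size (the shard index below reports a `total_size` of 74,777,378,816 bytes, about 37B parameters at 2 bytes each in float16). A quick sanity-check sketch, with a hypothetical repo id:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-username/zelus")  # hypothetical repo id
print(config.model_type, config.num_hidden_layers, config.hidden_size)

# Cross-check against the total_size reported in model.safetensors.index.json:
total_bytes = 74_777_378_816
print(f"~{total_bytes / 2 / 1e9:.1f}B parameters at 2 bytes per float16 weight")
```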
mergekit_config.yml
ADDED
@@ -0,0 +1,20 @@
slices:
  - sources:
      - model: davidkim205/Rhea-72b-v0.5
        layer_range: [0, 40]
      - model: migtissera/Tess-72B-v1.5b
        layer_range: [0, 40]
# or, the equivalent models: syntax:
# models:
#   - model: psmathur/orca_mini_v3_13b
#   - model: garage-bAInd/Platypus2-13B
merge_method: slerp
base_model: davidkim205/Rhea-72b-v0.5
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5 # fallback for rest of tensors
dtype: float16
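In this file, `t` controls how far each tensor moves from the base model (`t = 0` keeps Rhea, `t = 1` takes Tess). When a filter's `value` is a list, mergekit spreads those anchor values across layer depth, so attention tensors start on the Rhea side and shift toward Tess in deeper layers while MLP tensors do the opposite. A small illustration of that per-layer schedule (an interpolation sketch, not mergekit's internal code):

```python
import numpy as np

def layer_t(anchors: list[float], num_layers: int) -> np.ndarray:
    """Linearly interpolate a list of anchor t-values across layer depth."""
    positions = np.linspace(0.0, 1.0, num=len(anchors))
    depth = np.linspace(0.0, 1.0, num=num_layers)
    return np.interp(depth, positions, anchors)

self_attn_t = layer_t([0, 0.5, 0.3, 0.7, 1], num_layers=40)
mlp_t = layer_t([1, 0.5, 0.7, 0.3, 0], num_layers=40)
print(self_attn_t[0], self_attn_t[-1])  # 0.0 at layer 0, 1.0 at layer 39
```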
merges.txt
ADDED
The diff for this file is too large to render.
model-00001-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7774d8a589762cdf2dd596cdf9d27b9afbc9742467eac236a2cee6e75a76cbff
size 1879065648
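Each `.safetensors` entry in this diff is a Git LFS pointer rather than the weights themselves: three text lines giving the pointer spec version, the SHA-256 of the real file, and its size in bytes. A tiny illustrative parser for that format:

```python
def parse_lfs_pointer(text: str) -> dict[str, str]:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7774d8a589762cdf2dd596cdf9d27b9afbc9742467eac236a2cee6e75a76cbff
size 1879065648"""

print(parse_lfs_pointer(pointer)["size"])  # -> 1879065648
```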
model-00002-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e3d1cf236da3f5db980eb03a049fb308e6f70672bf9d9435490a133d63846ab
size 402670048
model-00003-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a1c61e10ff771ad36ec7f35870f3c58b880d2ff197c35c3b818fd6c93ee4889a
size 2491416712
model-00004-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:65efef25d5e7515218270965783c1346b5452cb22aa9df2f40fd36439c8a7153
size 1879065648
model-00005-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:529e944a0e54bad7b9d0d14df5372556a4cdb4528f6cad8211f96ac5641bdf6c
size 1879098624
model-00006-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ccf2694d02f4d151da45236df410fbf99d9b1d65aaa055380297ca4bfd5c2902
size 1610646208
model-00007-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4b595d2946c7395d513167dd2f27cc62e02089de942ae6967144a6d5e3aa7d5d
size 1744864296
model-00008-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f28f64933494f1f5627b4dc3c9637c580e33dcd856f137154bf8a4f661c4fb55
size 1744864296
model-00009-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1d70e31f1ec1feba4fcd82bb2f540f3919a93ee8bf53445edd96f51661af7c32
size 1879081888
model-00010-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3e496e47627926a30a87c58cb21694fa43de52d84f2ebb9bf97ef6b67cabc385
size 1879065648
model-00011-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c41217c1b39527668a5b9b104e777518ea8f6ba37f6c99efecd2f1fb2fc2338
size 1879082136
model-00012-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d5d6811d8461c2b4f289ce0a45bf36233b87eac625e8cd61c1f3a22154b12d34
size 1610630192
model-00013-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:703e3209d9ddd154d98befe7f36f56181a1e3f6ee1ec86368991dd2c6b00f73a
size 1744864304
model-00014-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b91d3e46d7484fce53fdee9cd1ae9768d059072ebf9187113e35741f6afab134
size 1879115128
model-00015-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8be6a7ab6b86bdf93add0911dbdfa4581c0394549fd5c78bc875ab25335f404c
size 1879065656
model-00016-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1fc657e5186cc9330856a1d2bc1b376e0af28bd79cdcc0bd11c001077326f64
size 1879082152
model-00017-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fea1bca80841a011e9dbb2e15a1aff565d45dce0cac389a6701848d50eb12fb5
size 1610662936
model-00018-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec18df098ce2a91a429bc61ed97b27edd13bcfc855a03a837e135cc9df70fbfa
size 1744864304
model-00019-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8a42350390f32f5f0ca2fd89acfcc742a6b669feeb8b2989688c108bd883d1e7
size 1744864304
model-00020-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:68339e4e3a07ad603ef26ee59eef313c763fe1b400ac8a28ed88e11248a25562
size 1610646208
model-00021-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f9cd886ffd43ddb11cc5149c1efe3a69401320faf5007a697ecf8275016f579f
size 1744864304
model-00022-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:51ec26b98d9c013a3fc0ed0841bf0da83bb2b6a613f0b53ef2653b8ad9037976
size 1744864304
model-00023-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5e67066c47f1db972698230d190f6c463c2140912c897f9b6e3255dfad39647d
size 1879082136
model-00024-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6622298e007b71b0dd133f79fd0b29baad546f6ce290a707040b5973eb69ea7c
size 1879082136
model-00025-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c5d78063c62001d84a6ab1593e90983aa6787eccfcb39e7598272bc568dcdb88
size 1744864296
model-00026-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a41d0ceb961c6c5c9da4a72ec212b2b854f728b513eb33acd09f928babfd3a93
size 1744831304
model-00027-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:96f41c0d66ac811b29a8ba5281de18f9bd06192094b206fdc00b2d56308d7054
size 1744864304
model-00028-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:845ac70980933a412e54defee4e456ac92a7b346883266e609447d5ad21797e8
size 1879115368
model-00029-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8c9bfe6b0ddf5f24a490b2f0bf18b0772308d574a2f2aaad5349ba71a172ec6a
size 1744864296
model-00030-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a70fb0986d6bbb3831fbf2e3dbadc71164f404aea2ef2c6b02974793fb64f5f4
size 1744864296
model-00031-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b736265484cb3c974e769559b9d40c75a46c7ffc47ebc233a07f086da1cda9d3
size 1879081904
model-00032-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8ddbc25381b4b4fc63c9cafd975e208d0f926869e309d3d8c2c7f07d0073eb3a
size 1744864304
model-00033-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:24480e73bc7654c13b59252d48b52267900de6a940e4324ed9c25d44ef86fb9d
size 1744864304
model-00034-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b6c0b70038688e3951a0663ce2f33f45c8e20afa4358d945196563df34569010
size 1879098400
model-00035-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:407491e4b8567106d431ad554200fb9e5878e58ed5209e436fccae192954b402
size 1879082152
model-00036-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9127ce6b0f811774181b74027040bba78ccdd7bd32c0f95c7f0d2623a417550f
size 1610662944
model-00037-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71bd9a7d5f47fc97d088201c0b6006907c08a366f72f4ac6bc57a35713d55eaf
size 1744831304
model-00038-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5fa3d94d0d84fdbeb2cb77ad1ff1383b460b1249ed9c6d36905ffb8e93418656
size 1744864304
model-00039-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:baff2cee781427f927d2b682f541bc24bb1b33ca51ee4a32a2f70e13ddcff423
size 1744864288
model-00040-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9f75bda0ab30580febad103871a8f557fe210ea2003d10dd35b07a70c8173b19
size 1744864304
model-00041-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a3c32d005e1ceadf0eead58e72af6f72e01e9ee4683cd5b70fa019c21fd5f5b5
size 1744897288
model-00042-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:42198b1bcd954a208d8a66f0ea2cb7e4e0cf57ad0e4ef2cfe13842ae06a9ee39
size 2491416704
model-00043-of-00043.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1a2d04c8e7beed10bd249960c5913cce8fe602776f4c0792ccf21c8007605bed
size 16504
model.safetensors.index.json
ADDED
@@ -0,0 +1 @@
{"metadata": {"mergekit_version": "0.0.4.1", "total_size": 74777378816}, "weight_map": {"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00043.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00043.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00043.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00043.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00043.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00043.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00043.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00043.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00043.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00043.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00043.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00043.safetensors", "model.layers.0.input_layernorm.weight": "model-00002-of-00043.safetensors", "model.embed_tokens.weight": "model-00003-of-00043.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00043.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00043.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00004-of-00043.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00005-of-00043.safetensors", "model.layers.3.input_layernorm.weight": "model-00005-of-00043.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00005-of-00043.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00005-of-00043.safetensors", "model.layers.2.input_layernorm.weight": "model-00005-of-00043.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00006-of-00043.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00006-of-00043.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00006-of-00043.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00006-of-00043.safetensors", "model.layers.1.input_layernorm.weight": "model-00006-of-00043.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00006-of-00043.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00007-of-00043.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00007-of-00043.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00007-of-00043.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00007-of-00043.safetensors", "model.layers.6.self_attn.v_proj.weight": 
"model-00007-of-00043.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00007-of-00043.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00007-of-00043.safetensors", "model.layers.6.input_layernorm.weight": "model-00007-of-00043.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00007-of-00043.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00008-of-00043.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00008-of-00043.safetensors", "model.layers.5.input_layernorm.weight": "model-00008-of-00043.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00008-of-00043.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00009-of-00043.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00009-of-00043.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00009-of-00043.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00009-of-00043.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00009-of-00043.safetensors", "model.layers.4.input_layernorm.weight": "model-00009-of-00043.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00009-of-00043.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00009-of-00043.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00010-of-00043.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00010-of-00043.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00010-of-00043.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00010-of-00043.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00010-of-00043.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00010-of-00043.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00010-of-00043.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00010-of-00043.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00010-of-00043.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00011-of-00043.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00011-of-00043.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00011-of-00043.safetensors", "model.layers.8.input_layernorm.weight": "model-00011-of-00043.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00011-of-00043.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00011-of-00043.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00011-of-00043.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00011-of-00043.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00011-of-00043.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00011-of-00043.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00012-of-00043.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00012-of-00043.safetensors", "model.layers.7.input_layernorm.weight": "model-00012-of-00043.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00012-of-00043.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00012-of-00043.safetensors", 
"model.layers.12.self_attn.v_proj.weight": "model-00012-of-00043.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00012-of-00043.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00012-of-00043.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00012-of-00043.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00013-of-00043.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00013-of-00043.safetensors", "model.layers.11.input_layernorm.weight": "model-00013-of-00043.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00013-of-00043.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00014-of-00043.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00014-of-00043.safetensors", "model.layers.10.input_layernorm.weight": "model-00014-of-00043.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00014-of-00043.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00014-of-00043.safetensors", "model.layers.9.input_layernorm.weight": "model-00014-of-00043.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00014-of-00043.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00015-of-00043.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00015-of-00043.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00015-of-00043.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00015-of-00043.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00016-of-00043.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00016-of-00043.safetensors", "model.layers.14.input_layernorm.weight": "model-00016-of-00043.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00016-of-00043.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00016-of-00043.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00017-of-00043.safetensors", 
"model.layers.13.input_layernorm.weight": "model-00017-of-00043.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00017-of-00043.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00017-of-00043.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00017-of-00043.safetensors", "model.layers.12.input_layernorm.weight": "model-00017-of-00043.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00017-of-00043.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00017-of-00043.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00017-of-00043.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00018-of-00043.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00018-of-00043.safetensors", "model.layers.17.input_layernorm.weight": "model-00018-of-00043.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00018-of-00043.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00019-of-00043.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00019-of-00043.safetensors", "model.layers.16.input_layernorm.weight": "model-00019-of-00043.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00019-of-00043.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00020-of-00043.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00020-of-00043.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00020-of-00043.safetensors", "model.layers.15.input_layernorm.weight": "model-00020-of-00043.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00020-of-00043.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00020-of-00043.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00021-of-00043.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00021-of-00043.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00021-of-00043.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00021-of-00043.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00021-of-00043.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00021-of-00043.safetensors", "model.layers.20.input_layernorm.weight": "model-00021-of-00043.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00021-of-00043.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00021-of-00043.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00022-of-00043.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00022-of-00043.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00022-of-00043.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00022-of-00043.safetensors", 
"model.layers.19.self_attn.k_proj.weight": "model-00022-of-00043.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00022-of-00043.safetensors", "model.layers.19.input_layernorm.weight": "model-00022-of-00043.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00022-of-00043.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00022-of-00043.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00023-of-00043.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00023-of-00043.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00023-of-00043.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00023-of-00043.safetensors", "model.layers.18.input_layernorm.weight": "model-00023-of-00043.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00023-of-00043.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00023-of-00043.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00023-of-00043.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00023-of-00043.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00023-of-00043.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00024-of-00043.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00024-of-00043.safetensors", "model.layers.22.input_layernorm.weight": "model-00024-of-00043.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00025-of-00043.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00025-of-00043.safetensors", "model.layers.21.input_layernorm.weight": "model-00025-of-00043.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00026-of-00043.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00026-of-00043.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00026-of-00043.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00026-of-00043.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00026-of-00043.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00026-of-00043.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00026-of-00043.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00027-of-00043.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00027-of-00043.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00027-of-00043.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00027-of-00043.safetensors", 
"model.layers.25.self_attn.k_proj.weight": "model-00027-of-00043.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00027-of-00043.safetensors", "model.layers.25.input_layernorm.weight": "model-00027-of-00043.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00027-of-00043.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00027-of-00043.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00028-of-00043.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00028-of-00043.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00028-of-00043.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00028-of-00043.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00028-of-00043.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00028-of-00043.safetensors", "model.layers.24.input_layernorm.weight": "model-00028-of-00043.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00028-of-00043.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00028-of-00043.safetensors", "model.layers.23.input_layernorm.weight": "model-00028-of-00043.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00028-of-00043.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00028-of-00043.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00028-of-00043.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00028-of-00043.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00029-of-00043.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00029-of-00043.safetensors", "model.layers.28.input_layernorm.weight": "model-00029-of-00043.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00030-of-00043.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00030-of-00043.safetensors", "model.layers.27.input_layernorm.weight": "model-00030-of-00043.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00031-of-00043.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00031-of-00043.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00031-of-00043.safetensors", "model.layers.26.input_layernorm.weight": "model-00031-of-00043.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00031-of-00043.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00031-of-00043.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00031-of-00043.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00031-of-00043.safetensors", 
"model.layers.31.mlp.up_proj.weight": "model-00032-of-00043.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00032-of-00043.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00032-of-00043.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00032-of-00043.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00032-of-00043.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00032-of-00043.safetensors", "model.layers.31.input_layernorm.weight": "model-00032-of-00043.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00032-of-00043.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00032-of-00043.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00033-of-00043.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00033-of-00043.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00033-of-00043.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00033-of-00043.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00033-of-00043.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00033-of-00043.safetensors", "model.layers.30.input_layernorm.weight": "model-00033-of-00043.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00033-of-00043.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00033-of-00043.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00034-of-00043.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00034-of-00043.safetensors", "model.layers.29.input_layernorm.weight": "model-00034-of-00043.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00034-of-00043.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00034-of-00043.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00034-of-00043.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00034-of-00043.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00034-of-00043.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00034-of-00043.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00035-of-00043.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00035-of-00043.safetensors", "model.layers.34.input_layernorm.weight": "model-00035-of-00043.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00035-of-00043.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00035-of-00043.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00036-of-00043.safetensors", "model.layers.33.input_layernorm.weight": "model-00036-of-00043.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00036-of-00043.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00036-of-00043.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00036-of-00043.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00036-of-00043.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00036-of-00043.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00036-of-00043.safetensors", 
"model.layers.32.input_layernorm.weight": "model-00036-of-00043.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00037-of-00043.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00037-of-00043.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00037-of-00043.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00037-of-00043.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00037-of-00043.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00037-of-00043.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00037-of-00043.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00038-of-00043.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00038-of-00043.safetensors", "model.layers.36.input_layernorm.weight": "model-00038-of-00043.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00038-of-00043.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00039-of-00043.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00039-of-00043.safetensors", "model.layers.35.input_layernorm.weight": "model-00039-of-00043.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00039-of-00043.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00040-of-00043.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00040-of-00043.safetensors", "model.layers.39.input_layernorm.weight": "model-00040-of-00043.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00040-of-00043.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00041-of-00043.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00041-of-00043.safetensors", "model.layers.38.input_layernorm.weight": "model-00041-of-00043.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00041-of-00043.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00041-of-00043.safetensors", 
"model.layers.37.input_layernorm.weight": "model-00041-of-00043.safetensors", "lm_head.weight": "model-00042-of-00043.safetensors", "model.norm.weight": "model-00043-of-00043.safetensors"}}
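This index is what lets loaders treat the 43 shards as one model: `weight_map` maps every tensor name to the shard file holding it. A hedged usage sketch (hypothetical repo id; fetching all shards is a roughly 75 GB download):

```python
import json

from transformers import AutoModelForCausalLM

# Locally, the index answers "which shard holds this tensor?"
with open("model.safetensors.index.json") as f:
    index = json.load(f)
print(index["weight_map"]["lm_head.weight"])  # -> model-00042-of-00043.safetensors

# from_pretrained resolves the same index to download and assemble the shards.
model = AutoModelForCausalLM.from_pretrained(
    "your-username/zelus",  # hypothetical repo id
    torch_dtype="float16",
    device_map="auto",      # requires accelerate; spreads shards across devices
)
```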
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
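All three special tokens point at `<|endoftext|>`, the Qwen-style token whose id (151643) also appears as `eos_token_id` and `pad_token_id` in config.json. A quick check, again with a hypothetical repo id:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-username/zelus")  # hypothetical repo id
print(tok.eos_token, tok.convert_tokens_to_ids(tok.eos_token))
# expected: <|endoftext|> 151643
```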
tokenizer.json
ADDED
The diff for this file is too large to render.