alexxi19 committed
Commit 27be771 · verified · 1 Parent(s): 7f9c1d1

Upload folder using huggingface_hub

Files changed (50)
  1. .gitattributes +1 -0
  2. README.md +56 -0
  3. config.json +27 -0
  4. mergekit_config.yml +26 -0
  5. model-00001-of-00017.safetensors +3 -0
  6. model-00001-of-00025.safetensors +3 -0
  7. model-00002-of-00017.safetensors +3 -0
  8. model-00002-of-00025.safetensors +3 -0
  9. model-00003-of-00017.safetensors +3 -0
  10. model-00003-of-00025.safetensors +3 -0
  11. model-00004-of-00017.safetensors +3 -0
  12. model-00004-of-00025.safetensors +3 -0
  13. model-00005-of-00017.safetensors +3 -0
  14. model-00005-of-00025.safetensors +3 -0
  15. model-00006-of-00017.safetensors +3 -0
  16. model-00006-of-00025.safetensors +3 -0
  17. model-00007-of-00017.safetensors +3 -0
  18. model-00007-of-00025.safetensors +3 -0
  19. model-00008-of-00017.safetensors +3 -0
  20. model-00008-of-00025.safetensors +3 -0
  21. model-00009-of-00017.safetensors +3 -0
  22. model-00009-of-00025.safetensors +3 -0
  23. model-00010-of-00017.safetensors +3 -0
  24. model-00010-of-00025.safetensors +3 -0
  25. model-00011-of-00017.safetensors +3 -0
  26. model-00011-of-00025.safetensors +3 -0
  27. model-00012-of-00017.safetensors +3 -0
  28. model-00012-of-00025.safetensors +3 -0
  29. model-00013-of-00017.safetensors +3 -0
  30. model-00013-of-00025.safetensors +3 -0
  31. model-00014-of-00017.safetensors +3 -0
  32. model-00014-of-00025.safetensors +3 -0
  33. model-00015-of-00017.safetensors +3 -0
  34. model-00015-of-00025.safetensors +3 -0
  35. model-00016-of-00017.safetensors +3 -0
  36. model-00016-of-00025.safetensors +3 -0
  37. model-00017-of-00017.safetensors +3 -0
  38. model-00017-of-00025.safetensors +3 -0
  39. model-00018-of-00025.safetensors +3 -0
  40. model-00019-of-00025.safetensors +3 -0
  41. model-00020-of-00025.safetensors +3 -0
  42. model-00021-of-00025.safetensors +3 -0
  43. model-00022-of-00025.safetensors +3 -0
  44. model-00023-of-00025.safetensors +3 -0
  45. model-00024-of-00025.safetensors +3 -0
  46. model-00025-of-00025.safetensors +3 -0
  47. model.safetensors.index.json +1 -0
  48. special_tokens_map.json +30 -0
  49. tokenizer.json +3 -0
  50. tokenizer_config.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ base_model:
+ - alexxi19/ft-v1-violet
+ - anthracite-org/magnum-v2-12b
+ library_name: transformers
+ tags:
+ - mergekit
+ - merge
+
+ ---
+ # merged_llm
+
+ This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).
+
+ ## Merge Details
+ ### Merge Method
+
+ This model was merged using the [TIES](https://arxiv.org/abs/2306.01708) merge method, with [alexxi19/ft-v1-violet](https://huggingface.co/alexxi19/ft-v1-violet) as the base.
+
+ ### Models Merged
+
+ The following models were included in the merge:
+ * [anthracite-org/magnum-v2-12b](https://huggingface.co/anthracite-org/magnum-v2-12b)
+
+ ### Configuration
+
+ The following YAML configuration was used to produce this model:
+
+ ```yaml
+ models:
+   # - model: meta-llama/Meta-Llama-3.1-8B-Instruct
+   # - model: Epiculous/Violet_Twilight-v0.2 # Another RP Model trained on... stuff
+   #   parameters:
+   #     density: 0.4
+   #     weight: 0.4
+   - model: anthracite-org/magnum-v2-12b # Another RP / Storytelling Model
+     parameters:
+       density: 0.4
+       weight: 0.4
+   # - model: maldv/badger-iota-llama-3-8b # Megamerge - Helps with General Knowledge
+   #   parameters:
+   #     density: 0.6
+   #     weight: 0.5
+   - model: alexxi19/ft-v1-violet
+     parameters:
+       density: 0.7
+       weight: 0.6
+ merge_method: ties
+ base_model: alexxi19/ft-v1-violet
+ parameters:
+   int8_mask: true
+   rescale: true
+   normalize: false
+ dtype: bfloat16
+ chat_template: chatml
+ ```
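For anyone who wants to try the merged checkpoint, here is a minimal usage sketch. It assumes the shards in this commit are published under a repo id like `alexxi19/merged_llm` (a placeholder, substitute the actual repository name) and that `transformers` and `torch` are installed. Since the merge declares `chat_template: chatml`, prompts should be formatted as ChatML, which `apply_chat_template` handles:

```python
# Minimal sketch: load the merged model and generate with a ChatML prompt.
# "alexxi19/merged_llm" is a placeholder repo id, not confirmed by this commit.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "alexxi19/merged_llm"  # hypothetical; use the real repo name

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Write a short scene set in a rainy city."}]
# apply_chat_template renders the ChatML turns declared by the tokenizer config
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

out = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```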
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "_name_or_path": "alexxi19/ft-v1-violet",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 15,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 1024000,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.48.0",
+   "use_cache": false,
+   "vocab_size": 131072
+ }
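One detail worth flagging in this config: `head_dim` is set explicitly to 128, so the attention projections are 32 × 128 = 4096 wide even though `hidden_size` is 5120 (the Mistral-NeMo-style decoupling of head width from hidden width). A quick sanity check, again assuming a placeholder repo id:

```python
# Sketch: inspect the architecture parameters from config.json.
# "alexxi19/merged_llm" is a placeholder repo id, not confirmed by this commit.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("alexxi19/merged_llm")
print(cfg.model_type, cfg.num_hidden_layers)                     # mistral, 40
print(cfg.hidden_size, cfg.num_attention_heads * cfg.head_dim)   # 5120, 4096
# GQA: 32 query heads share 8 key/value heads, i.e. 4 queries per KV head.
print(cfg.num_attention_heads // cfg.num_key_value_heads)        # 4
```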
mergekit_config.yml ADDED
@@ -0,0 +1,26 @@
+ models:
+   # - model: meta-llama/Meta-Llama-3.1-8B-Instruct
+   # - model: Epiculous/Violet_Twilight-v0.2 # Another RP Model trained on... stuff
+   #   parameters:
+   #     density: 0.4
+   #     weight: 0.4
+   - model: anthracite-org/magnum-v2-12b # Another RP / Storytelling Model
+     parameters:
+       density: 0.4
+       weight: 0.4
+   # - model: maldv/badger-iota-llama-3-8b # Megamerge - Helps with General Knowledge
+   #   parameters:
+   #     density: 0.6
+   #     weight: 0.5
+   - model: alexxi19/ft-v1-violet
+     parameters:
+       density: 0.7
+       weight: 0.6
+ merge_method: ties
+ base_model: alexxi19/ft-v1-violet
+ parameters:
+   int8_mask: true
+   rescale: true
+   normalize: false
+ dtype: bfloat16
+ chat_template: chatml
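To reproduce a merge from this config, mergekit can be driven from its CLI (`mergekit-yaml mergekit_config.yml ./out`) or from Python. A sketch of the Python route follows; it assumes mergekit's documented `run_merge` entry point and `MergeOptions` fields, which may need adjusting for your installed version:

```python
# Sketch: re-run this merge via mergekit's Python API (signatures as in
# mergekit's README at the time of writing; verify against your version).
import yaml
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    "./merged_llm",  # output directory for the merged shards
    options=MergeOptions(cuda=True, copy_tokenizer=True),
)
```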
model-00001-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4557790552e67297138d594690b564a3f8419e94cdffb115cf2697c9c60377ac
+ size 1050673280
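Each `*.safetensors` entry in this commit is a Git LFS pointer (spec version, `oid sha256:...`, and `size` in bytes) rather than the weights themselves. A small sketch, assuming the shard has been downloaded locally, for checking it against the pointer above:

```python
# Sketch: verify a downloaded shard against its Git LFS pointer.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so large shards fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while data := f.read(chunk):
            h.update(data)
    return h.hexdigest()

# oid from the pointer shown above
expected = "4557790552e67297138d594690b564a3f8419e94cdffb115cf2697c9c60377ac"
assert sha256_of("model-00001-of-00017.safetensors") == expected
```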
model-00001-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fac972374236aef5d6a73850ade5e529963509c905ddf7ae78e9cb0b525ca647
+ size 1342177408
model-00002-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85dedb0447c741f26ee20baa004b0f11ca51210ad536be6950a86df1718d7c3b
+ size 1050673296
model-00002-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:931d660b5b1f9a6c0c7c10a1cf232e1940c8c2f6622ee61721e2e1402b41c2b3
+ size 1342177424
model-00003-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d643e62b388fadf153f875110588ddc07b5c63b05ff08895bbc0b82cbc4847e2
+ size 989899000
model-00003-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54eae70027ebe3505bf0d6ea07b1cf2f7011894aea1fc81dd48f7e2634e71db6
+ size 996189888
model-00004-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb4dcc690319741ce9adc88baddf017a39e917ee23539d66a423c9d1c12b31e2
+ size 989890728
model-00004-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98f012a119098110e0869d730d9ec9378e9ff164c19352e2b978392b624a154e
+ size 933265104
model-00005-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74b1c3eee6f40ad21aa6bdd0bd14c3a75c314065d9c194be94a9f89b33a305be
+ size 998287760
model-00005-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26d0424ff7922fdb6077de10917c37600298a4a53c023585a90dadceebba2c49
+ size 943761344
model-00006-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e36b7dcbca2f01743b86d1cd56b08c8dbd23afdfa1440d01dd66fa3dbbb3c91e
+ size 947956216
model-00006-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f14e96ccc94a26a10eee695ef0a7eba92960dfb2dde7352f57f0b140dac508f
+ size 943761344
model-00007-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f2da0a58ad009ef832a015e9b875c5afdeb7570815084a46bcf6640897e75d2
+ size 989890728
model-00007-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:011c7e308a682fb3426efa9a662464d29e068d76614b8dfb34de02476bebcbb7
+ size 996179560
model-00008-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:403b9615fc56cc288d479f87476007bbfb8503b14e1c89f058af87e89d5f84c3
+ size 989890720
model-00008-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:909759b5e01b9e75eac36e094b42a90c922e94871f4ecdae41dc6fe78cee71f8
+ size 933265104
model-00009-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57797790173961b474135b9fb336162bf162bc6d22b9129da6cc97d962b18147
+ size 998287760
model-00009-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5eb062f5b9078ea69f0514931732ef6404e8faa8f319e5c7be689c2f8f16c1e0
+ size 943761344
model-00010-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8d68b303d825f751725d2be045a23cdadd09f8d5f73ae32b83018b7464dd907
+ size 947956216
model-00010-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:330652f6bd54da477794277824f40af8885dd707fcd91ed01b191e549984c1ec
+ size 943761344
model-00011-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35fb4d239893e1370534d8e6b9f6efb08d2de3fad33452ab84b4d19611457420
+ size 989890728
model-00011-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d888621ce9765c3eb6f64d037ac9828f732702fc7b4bdd639dda68cb97eac79
+ size 996179560
model-00012-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f8e8cb48f28d75c13337c387eb462f6d8500420a5b01108f7acddff4759b16f
+ size 989890728
model-00012-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:975c789daad118df5f2dffb934970a60124cd34d3432e8402f37c9dd11ef2bc4
+ size 933265104
model-00013-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ec823137e4ab7989b2f4440be8eb047a8756d5d7ea8a705dc803a3fae4e8f78
+ size 998287752
model-00013-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45699f51d2554195a1ef6e14131b258b1828ac88157ae0f845c07d8ceb0e45cd
+ size 943761344
model-00014-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea155dccee68feb8505c1465f5113e7f8a874f94f0a3587e1b137303b6dba944
+ size 947956208
model-00014-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e7d222c4b691c623965875c70c172e2ea1f6185bd07659fe214c0d364325c3e
+ size 943761344
model-00015-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfbccdf85720ba64732cea8e852aaeb6c122306d7376bcb3d5fb2978f950fd72
+ size 989890712
model-00015-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db33f1822c7853f0ccdb248dd638a61085a983ad2c1c653bc8b1ff607531c02b
+ size 996179560
model-00016-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45a6088697aff362a8a2805a3e1b763fbb96865680bccf6d7c638e3e87ea2037
+ size 989890712
model-00016-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06d1ed6b5e297950c02594663cbb268ecf463e961a44efddb7ca2edef6cea379
+ size 933265096
model-00017-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9bd760166ffd1cb073b231029ff2b0a9404317b276c6991bfed3015d2c459a6
+ size 201343792
model-00017-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9ce4d0cb4bfe73f7a40edf60ad1abbca5ac4f58f8a5209453191403bea4bc1d
+ size 943761344
model-00018-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9420a6b14d748fa9694de81fea37313b97439d1b9f568d98056e60a8b9b3d6e9
+ size 943761344
model-00019-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efb21b31198c12d4f6647e686ab30a58cc293ee4a8f89cd6d1896c0f94141351
+ size 996179560
model-00020-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:141aa2196d09a6e46e0b3689106ce94b98251155f59551474bae18ae9ff13a3c
+ size 933265104
model-00021-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92a4448b2e80ae97f0216f10e21e894f5c84b06fcdb126ffe145bf6e2c582acf
+ size 943761344
model-00022-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4677a2c21d5d29cca119b8d30c30b20f03dc9f6911d88de362ee1b73f6c52664
+ size 943761336
model-00023-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06f262985a4d42d41800a74d0ac7d2ac01ed01fa8f6c7ddf9d4b6ae574ed7e8d
+ size 996179552
model-00024-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7d215b4740cee3fd5443f9847395b1b528b5e0a829467289918d08374753ccc
+ size 933265088
model-00025-of-00025.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16eb0c7f9cd638a944c8ef16ff7b13fb0fc6c78edf15b755ca6f4f8f9c6cb9fe
+ size 796960560
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.5.2", "total_size": 24495564800}, "weight_map": {"lm_head.weight": "model-00001-of-00025.safetensors", "model.embed_tokens.weight": "model-00002-of-00025.safetensors", "model.layers.0.input_layernorm.weight": "model-00003-of-00025.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00025.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00003-of-00025.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00003-of-00025.safetensors", "model.layers.1.input_layernorm.weight": "model-00003-of-00025.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00003-of-00025.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00003-of-00025.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00003-of-00025.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00003-of-00025.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00003-of-00025.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00004-of-00025.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00004-of-00025.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.input_layernorm.weight": "model-00004-of-00025.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00004-of-00025.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00004-of-00025.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00025.safetensors", "model.layers.11.input_layernorm.weight": "model-00004-of-00025.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00004-of-00025.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00004-of-00025.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00005-of-00025.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00005-of-00025.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00005-of-00025.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00005-of-00025.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00005-of-00025.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.input_layernorm.weight": "model-00005-of-00025.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00025.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00005-of-00025.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00025.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00025.safetensors", "model.layers.13.input_layernorm.weight": "model-00005-of-00025.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00005-of-00025.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00006-of-00025.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00006-of-00025.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00006-of-00025.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00006-of-00025.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00006-of-00025.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00006-of-00025.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.input_layernorm.weight": "model-00006-of-00025.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00006-of-00025.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00006-of-00025.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00006-of-00025.safetensors", "model.layers.15.input_layernorm.weight": "model-00006-of-00025.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00007-of-00025.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00007-of-00025.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00007-of-00025.safetensors", "model.layers.16.input_layernorm.weight": "model-00007-of-00025.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00007-of-00025.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00007-of-00025.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00007-of-00025.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00007-of-00025.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00007-of-00025.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00008-of-00025.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00008-of-00025.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.input_layernorm.weight": "model-00008-of-00025.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00008-of-00025.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00008-of-00025.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00008-of-00025.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00008-of-00025.safetensors", "model.layers.18.input_layernorm.weight": "model-00008-of-00025.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00008-of-00025.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00008-of-00025.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00009-of-00025.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00009-of-00025.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00009-of-00025.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00009-of-00025.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00009-of-00025.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.input_layernorm.weight": "model-00009-of-00025.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00009-of-00025.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00009-of-00025.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00009-of-00025.safetensors", "model.layers.2.input_layernorm.weight": "model-00009-of-00025.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00009-of-00025.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00010-of-00025.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00010-of-00025.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00010-of-00025.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00010-of-00025.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00010-of-00025.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00010-of-00025.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.input_layernorm.weight": "model-00010-of-00025.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00010-of-00025.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00010-of-00025.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00010-of-00025.safetensors", "model.layers.21.input_layernorm.weight": "model-00010-of-00025.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00011-of-00025.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00011-of-00025.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00011-of-00025.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00011-of-00025.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00011-of-00025.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00011-of-00025.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00011-of-00025.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00011-of-00025.safetensors", "model.layers.22.input_layernorm.weight": "model-00011-of-00025.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00011-of-00025.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00011-of-00025.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00011-of-00025.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00011-of-00025.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00011-of-00025.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00012-of-00025.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00012-of-00025.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.input_layernorm.weight": "model-00012-of-00025.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00012-of-00025.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00012-of-00025.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00012-of-00025.safetensors", "model.layers.24.input_layernorm.weight": "model-00012-of-00025.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00012-of-00025.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00012-of-00025.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00013-of-00025.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00013-of-00025.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00013-of-00025.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00013-of-00025.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00013-of-00025.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.input_layernorm.weight": "model-00013-of-00025.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00013-of-00025.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00013-of-00025.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00013-of-00025.safetensors", "model.layers.26.input_layernorm.weight": "model-00013-of-00025.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00013-of-00025.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00014-of-00025.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00014-of-00025.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00014-of-00025.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00014-of-00025.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00014-of-00025.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00014-of-00025.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.input_layernorm.weight": "model-00014-of-00025.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00014-of-00025.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00014-of-00025.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00014-of-00025.safetensors", "model.layers.28.input_layernorm.weight": "model-00014-of-00025.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00015-of-00025.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00015-of-00025.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00015-of-00025.safetensors", "model.layers.29.input_layernorm.weight": "model-00015-of-00025.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00015-of-00025.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00015-of-00025.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00015-of-00025.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00015-of-00025.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00015-of-00025.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00016-of-00025.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00016-of-00025.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.input_layernorm.weight": "model-00016-of-00025.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00016-of-00025.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00016-of-00025.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00016-of-00025.safetensors", "model.layers.30.input_layernorm.weight": "model-00016-of-00025.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00016-of-00025.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00016-of-00025.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00017-of-00025.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00017-of-00025.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00017-of-00025.safetensors", "model.layers.30.self_attn.o_proj.weight": 
"model-00017-of-00025.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00017-of-00025.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.input_layernorm.weight": "model-00017-of-00025.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00017-of-00025.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00017-of-00025.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00017-of-00025.safetensors", "model.layers.32.input_layernorm.weight": "model-00017-of-00025.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00017-of-00025.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00018-of-00025.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00018-of-00025.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00018-of-00025.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00018-of-00025.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00018-of-00025.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00018-of-00025.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.input_layernorm.weight": "model-00018-of-00025.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00018-of-00025.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00018-of-00025.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00018-of-00025.safetensors", "model.layers.34.input_layernorm.weight": "model-00018-of-00025.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00019-of-00025.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00019-of-00025.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00019-of-00025.safetensors", "model.layers.35.input_layernorm.weight": "model-00019-of-00025.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00019-of-00025.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00019-of-00025.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00019-of-00025.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00019-of-00025.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00019-of-00025.safetensors", "model.layers.35.self_attn.o_proj.weight": 
"model-00020-of-00025.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00020-of-00025.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.input_layernorm.weight": "model-00020-of-00025.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00020-of-00025.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00020-of-00025.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00020-of-00025.safetensors", "model.layers.37.input_layernorm.weight": "model-00020-of-00025.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00020-of-00025.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00020-of-00025.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00021-of-00025.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00021-of-00025.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00021-of-00025.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00021-of-00025.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00021-of-00025.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.input_layernorm.weight": "model-00021-of-00025.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00021-of-00025.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00021-of-00025.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00021-of-00025.safetensors", "model.layers.39.input_layernorm.weight": "model-00021-of-00025.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00021-of-00025.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00022-of-00025.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00022-of-00025.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00022-of-00025.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00022-of-00025.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00022-of-00025.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00022-of-00025.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.input_layernorm.weight": "model-00022-of-00025.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00022-of-00025.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.self_attn.o_proj.weight": 
"model-00022-of-00025.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00022-of-00025.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00022-of-00025.safetensors", "model.layers.5.input_layernorm.weight": "model-00022-of-00025.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00023-of-00025.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00023-of-00025.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00023-of-00025.safetensors", "model.layers.6.input_layernorm.weight": "model-00023-of-00025.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00023-of-00025.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00023-of-00025.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00023-of-00025.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00023-of-00025.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00023-of-00025.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00024-of-00025.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00024-of-00025.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.input_layernorm.weight": "model-00024-of-00025.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00024-of-00025.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00024-of-00025.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00024-of-00025.safetensors", "model.layers.8.input_layernorm.weight": "model-00024-of-00025.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00024-of-00025.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00024-of-00025.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00025-of-00025.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00025-of-00025.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00025-of-00025.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00025-of-00025.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00025-of-00025.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.input_layernorm.weight": "model-00025-of-00025.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00025-of-00025.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00025-of-00025.safetensors", 
"model.layers.9.self_attn.q_proj.weight": "model-00025-of-00025.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00025-of-00025.safetensors", "model.norm.weight": "model-00025-of-00025.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
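Note that `eos_token` is ChatML's `<|im_end|>` rather than `</s>`, matching the `chat_template: chatml` setting in the merge config and the `eos_token_id: 15` in config.json. A quick check, again with a placeholder repo id:

```python
# Sketch: confirm the special tokens line up with config.json.
# "alexxi19/merged_llm" is a placeholder repo id, not confirmed by this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("alexxi19/merged_llm")
print(tok.eos_token)                            # <|im_end|>
print(tok.convert_tokens_to_ids("<|im_end|>"))  # expected to equal eos_token_id (15)
```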
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b4c8fcd33487a449c07f423d47adb035bba8347ccf13eb074b4d1fef8acf919
+ size 17078288
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff