blueRab2it committed on
Commit 09f03ab
1 Parent(s): 87e8107

Upload folder using huggingface_hub

.ipynb_checkpoints/mergekit_config-checkpoint.yml ADDED
@@ -0,0 +1,16 @@
+ slices:
+   - sources:
+       - model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+         layer_range: [0, 32]
+       - model: zhengr/MixTAO-7Bx2-MoE-v8.1
+         layer_range: [0, 32]
+ merge_method: slerp
+ base_model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+ parameters:
+   t:
+     - filter: self_attn
+       value: [0, 0.5, 0.3, 0.7, 1]
+     - filter: mlp
+       value: [1, 0.5, 0.7, 0.3, 0]
+     - value: 0.5
+ dtype: bfloat16
README.md CHANGED
@@ -1,3 +1,63 @@
  ---
- license: apache-2.0
+ tags:
+ - merge
+ - mergekit
+ - lazymergekit
+ - yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+ - zhengr/MixTAO-7Bx2-MoE-v8.1
+ base_model:
+ - yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+ - zhengr/MixTAO-7Bx2-MoE-v8.1
  ---
+
+ # Godrick_7Bx2_MoE_13B-v0.1
+
+ Godrick_7Bx2_MoE_13B-v0.1 is a merge of the following models using [LazyMergekit](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb?usp=sharing):
+ * [yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B](https://huggingface.co/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B)
+ * [zhengr/MixTAO-7Bx2-MoE-v8.1](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-v8.1)
+
+ ## 🧩 Configuration
+
+ ```yaml
+ slices:
+   - sources:
+       - model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+         layer_range: [0, 32]
+       - model: zhengr/MixTAO-7Bx2-MoE-v8.1
+         layer_range: [0, 32]
+ merge_method: slerp
+ base_model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+ parameters:
+   t:
+     - filter: self_attn
+       value: [0, 0.5, 0.3, 0.7, 1]
+     - filter: mlp
+       value: [1, 0.5, 0.7, 0.3, 0]
+     - value: 0.5
+ dtype: bfloat16
+ ```
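To turn this configuration back into model weights, the YAML can be handed to mergekit. Below is a minimal sketch, assuming `pip install mergekit` and the `MergeConfiguration`/`run_merge` Python API shown in mergekit's README; the output path and option values are illustrative, not taken from this repo:

```python
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML shown above (saved as mergekit_config.yml) into a validated config.
with open("mergekit_config.yml") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Run the slerp merge and write the merged model to disk.
run_merge(
    merge_config,
    out_path="./Godrick_7Bx2_MoE_13B-v0.1",  # hypothetical output directory
    options=MergeOptions(
        cuda=True,             # set False to merge on CPU
        copy_tokenizer=True,   # carry the base model's tokenizer into the output
        lazy_unpickle=True,    # lower peak memory while reading shards
    ),
)
```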
+
+ ## 💻 Usage
+
+ ```python
+ !pip install -qU transformers accelerate
+
+ from transformers import AutoTokenizer
+ import transformers
+ import torch
+
+ model = "blueRab2it/Godrick_7Bx2_MoE_13B-v0.1"
+ messages = [{"role": "user", "content": "What is a large language model?"}]
+
+ tokenizer = AutoTokenizer.from_pretrained(model)
+ prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+ pipeline = transformers.pipeline(
+     "text-generation",
+     model=model,
+     torch_dtype=torch.float16,
+     device_map="auto",
+ )
+
+ outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
+ print(outputs[0]["generated_text"])
+ ```
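By default the `text-generation` pipeline returns the prompt together with the completion. A small optional follow-up, reusing the variables from the snippet above, keeps only the newly generated text:

```python
# The pipeline echoes the prompt; slice it off to keep just the completion.
completion = outputs[0]["generated_text"][len(prompt):]
print(completion.strip())
```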
config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "_name_or_path": "yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mixtral",
+   "num_attention_heads": 32,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "num_local_experts": 2,
+   "output_router_logits": false,
+   "pad_token_id": 2,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 10000.0,
+   "router_aux_loss_coef": 0.001,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.38.2",
+   "unsloth_version": "2024.1",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
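The config describes a Mixtral-style mixture-of-experts model with two experts per layer, both of which are routed to on every token (`num_local_experts: 2`, `num_experts_per_tok: 2`). A quick sanity check with the standard `transformers` API, using the repo id from the README above:

```python
from transformers import AutoConfig

# Fetch config.json from the Hub and inspect the MoE settings.
config = AutoConfig.from_pretrained("blueRab2it/Godrick_7Bx2_MoE_13B-v0.1")
print(config.model_type)           # mixtral
print(config.num_local_experts)    # 2 experts per MoE layer
print(config.num_experts_per_tok)  # 2 experts active per token
```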
mergekit_config.yml ADDED
@@ -0,0 +1,16 @@
+ slices:
+   - sources:
+       - model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+         layer_range: [0, 32]
+       - model: zhengr/MixTAO-7Bx2-MoE-v8.1
+         layer_range: [0, 32]
+ merge_method: slerp
+ base_model: yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B
+ parameters:
+   t:
+     - filter: self_attn
+       value: [0, 0.5, 0.3, 0.7, 1]
+     - filter: mlp
+       value: [1, 0.5, 0.7, 0.3, 0]
+     - value: 0.5
+ dtype: bfloat16
model-00001-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69b5e84b7d3f67d78c5c5ba36bcfb28032f981423d0f72d946924155f810384d
+ size 1906313128
model-00002-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7cc16a445d3eb26ab6326d162fdabed24635c0914290446e993978d1f1290de2
+ size 1996490960
model-00003-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4fb84f193f002ff026fc2c104eb2c13f1c0fcda605483f3ad5e3b1cb62bebd22
+ size 1996490968
model-00004-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1e26563242fcfdff1018cc25fecfcd9777fb0a9cf05806a900bd84222c4be3a
+ size 1996490960
model-00005-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47d323ebfed39cefae0312147ed6d6c49d53e42ece1a48e4072a574cb0e50f2e
+ size 1906321400
model-00006-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2000bc549a33509e7adc7d1a4cb4a53fccdf6ac69d60674275516d695ea8aa18
+ size 1996490960
model-00007-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8158c6ba5a8aa0cae3f62df72cc8f4721375eb1a9311c07121c24847947c697
+ size 1996490968
model-00008-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17ed8491331152ebb8b4000faf3359ffe7e837b70c7094e1f49abf032a552415
+ size 1996490968
model-00009-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f02d709a95baab3e00faecd0441958907d7d7e1cf29b9708ef1b434edc769e3
+ size 1996490952
model-00010-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b85970aa3b92e1035890d38244a0c4d5cc284feab0b9d86810e6e53334df1444
+ size 1996490968
model-00011-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ecf3887d00c5b5d4539bd55d39f70b5f06096bad34b7004c33d9fed44bfdd3a
+ size 1996490968
model-00012-of-00012.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3df6612bed69e979af99063bc2214f4edd329713d5d1f5821faeaf9b73b12633
+ size 1292375104
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.4.1"}, "weight_map": {"model.layers.5.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.5.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.embed_tokens.weight": "model-00001-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", 
"model.layers.24.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.1.w2.weight": "model-00004-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.0.w2.weight": "model-00004-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.1.w2.weight": "model-00004-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.27.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.27.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00012.safetensors", "model.layers.24.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "model.layers.5.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.5.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.0.w2.weight": 
"model-00005-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00012.safetensors", "lm_head.weight": "model-00005-of-00012.safetensors", "model.norm.weight": "model-00005-of-00012.safetensors", "model.layers.31.block_sparse_moe.experts.1.w3.weight": "model-00005-of-00012.safetensors", "model.layers.31.block_sparse_moe.experts.0.w3.weight": "model-00006-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00012.safetensors", "model.layers.24.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.24.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00012.safetensors", 
"model.layers.31.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.31.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.27.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.27.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.0.w1.weight": "model-00008-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.1.w1.weight": "model-00008-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.9.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.8.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.7.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.6.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.5.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.5.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.4.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.3.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00012.safetensors", "model.layers.2.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.0.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.17.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.16.block_sparse_moe.experts.0.w1.weight": 
"model-00010-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.15.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.14.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.13.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.12.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.11.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.1.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.26.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.25.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.24.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.24.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.23.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.22.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.21.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.20.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00012.safetensors", "model.layers.19.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.10.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00012.safetensors", "model.layers.31.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", "model.layers.31.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", "model.layers.30.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", "model.layers.29.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", "model.layers.28.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00012.safetensors", "model.layers.27.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", 
"model.layers.27.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00012.safetensors", "model.layers.18.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00012.safetensors", "model.layers.31.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.30.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.29.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.28.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.27.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.26.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.25.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.24.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.23.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.22.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.21.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.20.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.19.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.18.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.17.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.16.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.15.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.14.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.13.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.12.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.11.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.10.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.9.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.8.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.7.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.6.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.5.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.4.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.3.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.2.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.1.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors", "model.layers.0.block_sparse_moe.gate.weight": "model-00012-of-00012.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "additional_special_tokens": [
+     "<unk>",
+     "<s>",
+     "</s>"
+   ],
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,50 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<unk>",
+     "<s>",
+     "</s>"
+   ],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "max_length": null,
+   "model_max_length": 255,
+   "pad_to_multiple_of": null,
+   "pad_token": "<s>",
+   "pad_token_type_id": 0,
+   "padding_side": "left",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }