emozilla committed on
Commit
9e09c60
1 Parent(s): 29b38d8
config.json CHANGED
@@ -4,14 +4,14 @@
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 0,
8
- "eos_token_id": 0,
9
  "head_dim": 128,
10
  "hidden_act": "silu",
11
  "hidden_size": 5120,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 17920,
14
- "max_position_embeddings": 4096,
15
  "mlp_bias": false,
16
  "model_type": "llama",
17
  "num_attention_heads": 40,
@@ -20,10 +20,10 @@
20
  "pretraining_tp": 1,
21
  "rms_norm_eps": 1e-05,
22
  "rope_scaling": null,
23
- "rope_theta": 130000.0,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.46.1",
27
  "use_cache": true,
28
- "vocab_size": 49152
29
  }
 
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 1,
8
+ "eos_token_id": 2,
9
  "head_dim": 128,
10
  "hidden_act": "silu",
11
  "hidden_size": 5120,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 17920,
14
+ "max_position_embeddings": 2048,
15
  "mlp_bias": false,
16
  "model_type": "llama",
17
  "num_attention_heads": 40,
 
20
  "pretraining_tp": 1,
21
  "rms_norm_eps": 1e-05,
22
  "rope_scaling": null,
23
+ "rope_theta": 10000.0,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.46.1",
27
  "use_cache": true,
28
+ "vocab_size": 32000
29
  }
generation_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "_from_model_config": true,
3
- "bos_token_id": 0,
4
- "eos_token_id": 0,
5
  "transformers_version": "4.46.1"
6
  }
 
1
  {
2
  "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
  "transformers_version": "4.46.1"
6
  }
model-00001-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c9f8b09a72193ca75bca00f0348fd4e748c5b50d6422dda38c17b9dbb3138412
3
- size 5248262744
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ca6bf4b7cda1cf116d33ccbfca4f65703b9d03c60c9e3ed9971314995d73a807
3
+ size 5326916768
model-00002-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bebee234a1e809efbc2464a5143d364e8aeb2177a0ce5f4d8a62ce6b4ac5d876
3
- size 5227323496
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d7ec6bbe356f560d382628bdf57ff93a3b75a1df12fe8ab9e7f31d45f50c9f5
3
+ size 5358395480
model-00003-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5b95514ac3c9a436b24889a2e6dd09ab5bcae3257f8574e14e1f190056eade3e
3
- size 5347899272
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6ae2f40ab046d1218b876f1575b6c2f48ae59c5d4338c18ee1f3b39f77b372cf
3
+ size 5216827256
model-00004-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c1a2edd6c0685074a66de86995ba9270f50ec5ae97aa13b7be8d0521241773ce
3
- size 5358395480
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69aeabaeb2cabcc36536b00f8bbcd35797269d8abc0b51d3a97b76902c375d07
3
+ size 5227323488
model-00005-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a53c8f1ccf255af247124199dc35c3f8d4b71dc67e969501153fb825b48cdd60
3
- size 5216827248
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2a0895f9f63f0927591d2d5e9fd726c8fd7b87f5b9c67e738922c775ec2d6ffd
3
+ size 5227323488
model-00006-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0477f8dca6960966a998de09f6e22b38d0e217731b0df45a7fe30c6b88505f37
3
- size 3014763360
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c2b94ec6e27950829e7f0b0735f55002c574cd5be1aae7684089846a737434a
3
+ size 2705412160
model.safetensors.index.json CHANGED
@@ -1 +1 @@
1
- {"metadata":{"total_size":29413427200},"weight_map":{"lm_head.weight":"model-00001-of-00006.safetensors","model.embed_tokens.weight":"model-00001-of-00006.safetensors","model.layers.0.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layer
s.10.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.13.self_
attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.14.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.14.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.post_attention_layernor
m.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.up_proj.weight":"model-0
0002-of-00006.safetensors","model.layers.19.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.21.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.21.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.mlp.gate_proj.weight":"model-00003-of-00006.
safetensors","model.layers.21.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.24.mlp.down_proj.weight":"model-00003-of-00006.safeten
sors","model.layers.24.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.input_layernorm.weight":"model-00003-of-00006.safetensors","
model.layers.27.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.28.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.29.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.lay
ers.3.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.v_
proj.weight":"model-00004-of-00006.safetensors","model.layers.32.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.q_proj.weigh
t":"model-00004-of-00006.safetensors","model.layers.34.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.35.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.35.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.36.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.o_proj.weight":"model-
00005-of-00006.safetensors","model.layers.37.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.k_proj.weight":"model-00005-of-00006.s
afetensors","model.layers.4.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.5.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model
.layers.5.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.5.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.5.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.5.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.6.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.post_attention_layernorm.weight":"
model-00006-of-00006.safetensors","model.layers.8.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.norm.weight":"model-00006-of-00006.safetensors"}}
 
1
+ {"metadata":{"total_size":29062154240},"weight_map":{"lm_head.weight":"model-00001-of-00006.safetensors","model.embed_tokens.weight":"model-00001-of-00006.safetensors","model.layers.0.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.0.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.1.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layer
s.10.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.10.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.11.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.12.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.13.self_
attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.q_proj.weight":"model-00001-of-00006.safetensors","model.layers.13.self_attn.v_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.input_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.14.mlp.down_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.mlp.gate_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.mlp.up_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.post_attention_layernorm.weight":"model-00001-of-00006.safetensors","model.layers.14.self_attn.k_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.self_attn.o_proj.weight":"model-00001-of-00006.safetensors","model.layers.14.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.14.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.15.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.post_attention_layernor
m.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.16.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.17.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.18.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.mlp.up_proj.weight":"model-0
0002-of-00006.safetensors","model.layers.19.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.19.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.2.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.gate_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.o_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.q_proj.weight":"model-00002-of-00006.safetensors","model.layers.20.self_attn.v_proj.weight":"model-00002-of-00006.safetensors","model.layers.21.input_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.21.mlp.down_proj.weight":"model-00002-of-00006.safetensors","model.layers.21.mlp.gate_proj.weight":"model-00002-of-00006.
safetensors","model.layers.21.mlp.up_proj.weight":"model-00002-of-00006.safetensors","model.layers.21.post_attention_layernorm.weight":"model-00002-of-00006.safetensors","model.layers.21.self_attn.k_proj.weight":"model-00002-of-00006.safetensors","model.layers.21.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.21.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.22.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.23.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.24.mlp.down_proj.weight":"model-00003-of-00006.safeten
sors","model.layers.24.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.24.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.25.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.26.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.input_layernorm.weight":"model-00003-of-00006.safetensors","
model.layers.27.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.27.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.mlp.up_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.post_attention_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.k_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.o_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.q_proj.weight":"model-00003-of-00006.safetensors","model.layers.28.self_attn.v_proj.weight":"model-00003-of-00006.safetensors","model.layers.29.input_layernorm.weight":"model-00003-of-00006.safetensors","model.layers.29.mlp.down_proj.weight":"model-00003-of-00006.safetensors","model.layers.29.mlp.gate_proj.weight":"model-00003-of-00006.safetensors","model.layers.29.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.29.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.lay
ers.3.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.3.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.30.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.31.self_attn.v_
proj.weight":"model-00004-of-00006.safetensors","model.layers.32.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.32.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.33.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.34.self_attn.q_proj.weigh
t":"model-00004-of-00006.safetensors","model.layers.34.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.gate_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.mlp.up_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.post_attention_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.k_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.o_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.q_proj.weight":"model-00004-of-00006.safetensors","model.layers.35.self_attn.v_proj.weight":"model-00004-of-00006.safetensors","model.layers.36.input_layernorm.weight":"model-00004-of-00006.safetensors","model.layers.36.mlp.down_proj.weight":"model-00004-of-00006.safetensors","model.layers.36.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.36.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.o_proj.weight":"model-
00005-of-00006.safetensors","model.layers.37.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.37.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.38.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.39.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.k_proj.weight":"model-00005-of-00006.s
afetensors","model.layers.4.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.4.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.40.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.41.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.down_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.gate_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.mlp.up_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.post_attention_layernorm.weight":"model-00005-of-00006.safetensors","model
.layers.5.self_attn.k_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.self_attn.o_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.self_attn.q_proj.weight":"model-00005-of-00006.safetensors","model.layers.5.self_attn.v_proj.weight":"model-00005-of-00006.safetensors","model.layers.6.input_layernorm.weight":"model-00005-of-00006.safetensors","model.layers.6.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.6.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.7.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.post_attention_layernorm.weight":"
model-00006-of-00006.safetensors","model.layers.8.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.8.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.input_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.down_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.gate_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.mlp.up_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.post_attention_layernorm.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.k_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.o_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.q_proj.weight":"model-00006-of-00006.safetensors","model.layers.9.self_attn.v_proj.weight":"model-00006-of-00006.safetensors","model.norm.weight":"model-00006-of-00006.safetensors"}}
special_tokens_map.json CHANGED
@@ -1,41 +1,29 @@
1
  {
2
- "additional_special_tokens": [
3
- "<|endoftext|>",
4
- "<|im_start|>",
5
- "<|im_end|>",
6
- "<repo_name>",
7
- "<reponame>",
8
- "<file_sep>",
9
- "<filename>",
10
- "<gh_stars>",
11
- "<issue_start>",
12
- "<issue_comment>",
13
- "<issue_closed>",
14
- "<jupyter_start>",
15
- "<jupyter_text>",
16
- "<jupyter_code>",
17
- "<jupyter_output>",
18
- "<jupyter_script>",
19
- "<empty_output>"
20
- ],
21
  "bos_token": {
22
- "content": "<|endoftext|>",
23
  "lstrip": false,
24
- "normalized": false,
25
  "rstrip": false,
26
  "single_word": false
27
  },
28
  "eos_token": {
29
- "content": "<|endoftext|>",
30
  "lstrip": false,
31
- "normalized": false,
 
 
 
 
 
 
 
32
  "rstrip": false,
33
  "single_word": false
34
  },
35
  "unk_token": {
36
- "content": "<|endoftext|>",
37
  "lstrip": false,
38
- "normalized": false,
39
  "rstrip": false,
40
  "single_word": false
41
  }
 
1
  {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  "bos_token": {
3
+ "content": "<s>",
4
  "lstrip": false,
5
+ "normalized": true,
6
  "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
+ "content": "</s>",
11
  "lstrip": false,
12
+ "normalized": true,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": true,
20
  "rstrip": false,
21
  "single_word": false
22
  },
23
  "unk_token": {
24
+ "content": "<unk>",
25
  "lstrip": false,
26
+ "normalized": true,
27
  "rstrip": false,
28
  "single_word": false
29
  }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,167 +1,41 @@
1
  {
2
- "add_prefix_space": false,
 
 
3
  "added_tokens_decoder": {
4
  "0": {
5
- "content": "<|endoftext|>",
6
  "lstrip": false,
7
- "normalized": false,
8
  "rstrip": false,
9
  "single_word": false,
10
  "special": true
11
  },
12
  "1": {
13
- "content": "<|im_start|>",
14
  "lstrip": false,
15
- "normalized": false,
16
  "rstrip": false,
17
  "single_word": false,
18
  "special": true
19
  },
20
  "2": {
21
- "content": "<|im_end|>",
22
  "lstrip": false,
23
- "normalized": false,
24
- "rstrip": false,
25
- "single_word": false,
26
- "special": true
27
- },
28
- "3": {
29
- "content": "<repo_name>",
30
- "lstrip": false,
31
- "normalized": false,
32
- "rstrip": false,
33
- "single_word": false,
34
- "special": true
35
- },
36
- "4": {
37
- "content": "<reponame>",
38
- "lstrip": false,
39
- "normalized": false,
40
- "rstrip": false,
41
- "single_word": false,
42
- "special": true
43
- },
44
- "5": {
45
- "content": "<file_sep>",
46
- "lstrip": false,
47
- "normalized": false,
48
- "rstrip": false,
49
- "single_word": false,
50
- "special": true
51
- },
52
- "6": {
53
- "content": "<filename>",
54
- "lstrip": false,
55
- "normalized": false,
56
- "rstrip": false,
57
- "single_word": false,
58
- "special": true
59
- },
60
- "7": {
61
- "content": "<gh_stars>",
62
- "lstrip": false,
63
- "normalized": false,
64
- "rstrip": false,
65
- "single_word": false,
66
- "special": true
67
- },
68
- "8": {
69
- "content": "<issue_start>",
70
- "lstrip": false,
71
- "normalized": false,
72
- "rstrip": false,
73
- "single_word": false,
74
- "special": true
75
- },
76
- "9": {
77
- "content": "<issue_comment>",
78
- "lstrip": false,
79
- "normalized": false,
80
- "rstrip": false,
81
- "single_word": false,
82
- "special": true
83
- },
84
- "10": {
85
- "content": "<issue_closed>",
86
- "lstrip": false,
87
- "normalized": false,
88
- "rstrip": false,
89
- "single_word": false,
90
- "special": true
91
- },
92
- "11": {
93
- "content": "<jupyter_start>",
94
- "lstrip": false,
95
- "normalized": false,
96
- "rstrip": false,
97
- "single_word": false,
98
- "special": true
99
- },
100
- "12": {
101
- "content": "<jupyter_text>",
102
- "lstrip": false,
103
- "normalized": false,
104
- "rstrip": false,
105
- "single_word": false,
106
- "special": true
107
- },
108
- "13": {
109
- "content": "<jupyter_code>",
110
- "lstrip": false,
111
- "normalized": false,
112
- "rstrip": false,
113
- "single_word": false,
114
- "special": true
115
- },
116
- "14": {
117
- "content": "<jupyter_output>",
118
- "lstrip": false,
119
- "normalized": false,
120
- "rstrip": false,
121
- "single_word": false,
122
- "special": true
123
- },
124
- "15": {
125
- "content": "<jupyter_script>",
126
- "lstrip": false,
127
- "normalized": false,
128
- "rstrip": false,
129
- "single_word": false,
130
- "special": true
131
- },
132
- "16": {
133
- "content": "<empty_output>",
134
- "lstrip": false,
135
- "normalized": false,
136
  "rstrip": false,
137
  "single_word": false,
138
  "special": true
139
  }
140
  },
141
- "additional_special_tokens": [
142
- "<|endoftext|>",
143
- "<|im_start|>",
144
- "<|im_end|>",
145
- "<repo_name>",
146
- "<reponame>",
147
- "<file_sep>",
148
- "<filename>",
149
- "<gh_stars>",
150
- "<issue_start>",
151
- "<issue_comment>",
152
- "<issue_closed>",
153
- "<jupyter_start>",
154
- "<jupyter_text>",
155
- "<jupyter_code>",
156
- "<jupyter_output>",
157
- "<jupyter_script>",
158
- "<empty_output>"
159
- ],
160
- "bos_token": "<|endoftext|>",
161
  "clean_up_tokenization_spaces": false,
162
- "eos_token": "<|endoftext|>",
163
- "model_max_length": 8192,
164
- "tokenizer_class": "GPT2Tokenizer",
165
- "unk_token": "<|endoftext|>",
166
- "vocab_size": 49152
 
 
 
167
  }
 
1
  {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": null,
5
  "added_tokens_decoder": {
6
  "0": {
7
+ "content": "<unk>",
8
  "lstrip": false,
9
+ "normalized": true,
10
  "rstrip": false,
11
  "single_word": false,
12
  "special": true
13
  },
14
  "1": {
15
+ "content": "<s>",
16
  "lstrip": false,
17
+ "normalized": true,
18
  "rstrip": false,
19
  "single_word": false,
20
  "special": true
21
  },
22
  "2": {
23
+ "content": "</s>",
24
  "lstrip": false,
25
+ "normalized": true,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  "rstrip": false,
27
  "single_word": false,
28
  "special": true
29
  }
30
  },
31
+ "bos_token": "<s>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  "clean_up_tokenization_spaces": false,
33
+ "eos_token": "</s>",
34
+ "legacy": false,
35
+ "model_max_length": 1000000000000000019884624838656,
36
+ "pad_token": "<unk>",
37
+ "sp_model_kwargs": {},
38
+ "tokenizer_class": "LlamaTokenizer",
39
+ "unk_token": "<unk>",
40
+ "use_default_system_prompt": false
41
  }