teeny-tiny-mixtral / model.safetensors.index.json
{
"metadata": {
"total_size": 12658753536
},
"weight_map": {
"lm_head.weight": "model-00003-of-00003.safetensors",
"model.embed_tokens.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.0.block_sparse_moe.gate.weight": "model-00001-of-00003.safetensors",
"model.layers.0.input_layernorm.weight": "model-00002-of-00003.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00003.safetensors",
"model.layers.1.block_sparse_moe.gate.weight": "model-00002-of-00003.safetensors",
"model.layers.1.input_layernorm.weight": "model-00003-of-00003.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
"model.norm.weight": "model-00003-of-00003.safetensors"
}
}
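
This index is what a sharded safetensors checkpoint ships alongside its shard files: `metadata.total_size` records the combined parameter size in bytes, and `weight_map` maps every parameter name to the shard that stores it (here, three files named `model-0000N-of-00003.safetensors`). As a minimal sketch, the Python below shows one way such an index could be consumed directly; the helper name `load_sharded_state_dict` and the `checkpoint_dir` argument are assumptions for illustration, and in practice `AutoModelForCausalLM.from_pretrained` resolves the index automatically.

```python
# Minimal sketch: rebuild a full state dict from a sharded safetensors checkpoint.
# Assumes the shard files listed in "weight_map" sit next to
# model.safetensors.index.json inside `checkpoint_dir`.
import json
from pathlib import Path

from safetensors.torch import load_file  # pip install safetensors torch


def load_sharded_state_dict(checkpoint_dir: str) -> dict:
    """Merge every shard referenced by the index into one state dict."""
    checkpoint_dir = Path(checkpoint_dir)
    with open(checkpoint_dir / "model.safetensors.index.json") as f:
        index = json.load(f)

    # "weight_map" maps each parameter name to its shard, e.g.
    # "lm_head.weight" -> "model-00003-of-00003.safetensors".
    # Opening each distinct shard once recovers all parameters.
    state_dict = {}
    for shard_name in sorted(set(index["weight_map"].values())):
        state_dict.update(load_file(str(checkpoint_dir / shard_name)))

    # Sanity check: every parameter named in the index should now be present.
    missing = set(index["weight_map"]) - set(state_dict)
    assert not missing, f"shards are missing parameters: {sorted(missing)}"
    return state_dict
```

For example, `load_sharded_state_dict("teeny-tiny-mixtral")` would return a dict keyed by the parameter names listed above, with each tensor loaded from its corresponding shard.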