tiny-random-CodeGenForCausalLM-sharded / model.safetensors.index.json
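This is the index file for the sharded safetensors checkpoint: `metadata.total_size` records the combined size of all tensors in bytes, and `weight_map` maps each parameter name to the shard file that stores it.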
{
  "metadata": {
    "total_size": 1827456
  },
  "weight_map": {
    "lm_head.bias": "model-00003-of-00003.safetensors",
    "lm_head.weight": "model-00003-of-00003.safetensors",
    "transformer.h.0.attn.causal_mask": "model-00001-of-00003.safetensors",
    "transformer.h.0.attn.out_proj.weight": "model-00001-of-00003.safetensors",
    "transformer.h.0.attn.qkv_proj.weight": "model-00001-of-00003.safetensors",
    "transformer.h.0.ln_1.bias": "model-00001-of-00003.safetensors",
    "transformer.h.0.ln_1.weight": "model-00001-of-00003.safetensors",
    "transformer.h.0.mlp.fc_in.bias": "model-00001-of-00003.safetensors",
    "transformer.h.0.mlp.fc_in.weight": "model-00001-of-00003.safetensors",
    "transformer.h.0.mlp.fc_out.bias": "model-00001-of-00003.safetensors",
    "transformer.h.0.mlp.fc_out.weight": "model-00001-of-00003.safetensors",
    "transformer.h.1.attn.causal_mask": "model-00001-of-00003.safetensors",
    "transformer.h.1.attn.out_proj.weight": "model-00001-of-00003.safetensors",
    "transformer.h.1.attn.qkv_proj.weight": "model-00001-of-00003.safetensors",
    "transformer.h.1.ln_1.bias": "model-00001-of-00003.safetensors",
    "transformer.h.1.ln_1.weight": "model-00001-of-00003.safetensors",
    "transformer.h.1.mlp.fc_in.bias": "model-00001-of-00003.safetensors",
    "transformer.h.1.mlp.fc_in.weight": "model-00001-of-00003.safetensors",
    "transformer.h.1.mlp.fc_out.bias": "model-00001-of-00003.safetensors",
    "transformer.h.1.mlp.fc_out.weight": "model-00001-of-00003.safetensors",
    "transformer.h.2.attn.causal_mask": "model-00002-of-00003.safetensors",
    "transformer.h.2.attn.out_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.2.attn.qkv_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.2.ln_1.bias": "model-00001-of-00003.safetensors",
    "transformer.h.2.ln_1.weight": "model-00001-of-00003.safetensors",
    "transformer.h.2.mlp.fc_in.bias": "model-00002-of-00003.safetensors",
    "transformer.h.2.mlp.fc_in.weight": "model-00002-of-00003.safetensors",
    "transformer.h.2.mlp.fc_out.bias": "model-00002-of-00003.safetensors",
    "transformer.h.2.mlp.fc_out.weight": "model-00002-of-00003.safetensors",
    "transformer.h.3.attn.causal_mask": "model-00002-of-00003.safetensors",
    "transformer.h.3.attn.out_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.3.attn.qkv_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.3.ln_1.bias": "model-00002-of-00003.safetensors",
    "transformer.h.3.ln_1.weight": "model-00002-of-00003.safetensors",
    "transformer.h.3.mlp.fc_in.bias": "model-00002-of-00003.safetensors",
    "transformer.h.3.mlp.fc_in.weight": "model-00002-of-00003.safetensors",
    "transformer.h.3.mlp.fc_out.bias": "model-00002-of-00003.safetensors",
    "transformer.h.3.mlp.fc_out.weight": "model-00002-of-00003.safetensors",
    "transformer.h.4.attn.causal_mask": "model-00002-of-00003.safetensors",
    "transformer.h.4.attn.out_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.4.attn.qkv_proj.weight": "model-00002-of-00003.safetensors",
    "transformer.h.4.ln_1.bias": "model-00002-of-00003.safetensors",
    "transformer.h.4.ln_1.weight": "model-00002-of-00003.safetensors",
    "transformer.h.4.mlp.fc_in.bias": "model-00002-of-00003.safetensors",
    "transformer.h.4.mlp.fc_in.weight": "model-00002-of-00003.safetensors",
    "transformer.h.4.mlp.fc_out.bias": "model-00002-of-00003.safetensors",
    "transformer.h.4.mlp.fc_out.weight": "model-00002-of-00003.safetensors",
    "transformer.ln_f.bias": "model-00002-of-00003.safetensors",
    "transformer.ln_f.weight": "model-00002-of-00003.safetensors",
    "transformer.wte.weight": "model-00001-of-00003.safetensors"
  }
}
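A minimal sketch of how a loader consumes this index, assuming a local copy of the repository (the directory name below is illustrative) with `safetensors` and `torch` installed:

import json
from pathlib import Path

from safetensors import safe_open

# Assumed local checkout of the repo; adjust the path to your setup.
ckpt_dir = Path("tiny-random-CodeGenForCausalLM-sharded")

with open(ckpt_dir / "model.safetensors.index.json") as f:
    index = json.load(f)

# Look up which shard holds a given parameter, then read only that tensor.
name = "transformer.h.3.mlp.fc_in.weight"
shard = index["weight_map"][name]  # "model-00002-of-00003.safetensors"

with safe_open(str(ckpt_dir / shard), framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), tensor.dtype)

In practice, `AutoModelForCausalLM.from_pretrained` on this repository reads the index and loads all three shards itself; the manual lookup above only shows what `weight_map` encodes. Note that a shard boundary can fall inside a layer: here `transformer.h.2.ln_1.*` lives in shard 00001 while the rest of `transformer.h.2` is in shard 00002.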