{
"metadata": {
"ParamSize": 194,
"ParamBytes": 1447284480.0,
"BitsPerParam": 32.0
},
"records": [
{
"dataPath": "params_shard_0.bin",
"format": "raw-shard",
"nbytes": 94371840,
"records": [
{
"name": "model.embed_tokens.weight",
"shape": [
49152,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 94371840,
"byteOffset": 0
}
],
"md5sum": "22da8ab49794d5908bc01239610bd5e3"
},
{
"dataPath": "params_shard_1.bin",
"format": "raw-shard",
"nbytes": 24581760,
"records": [
{
"name": "model.layers.0.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 0
},
{
"name": "model.layers.0.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1920
},
{
"name": "model.layers.0.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 4917120
},
{
"name": "model.layers.0.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.0.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 14749440
},
{
"name": "model.layers.0.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 17821440
},
{
"name": "model.layers.1.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 19664640
},
{
"name": "model.layers.1.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 19666560
}
],
"md5sum": "3455513031408c2af861eba70f886144"
},
{
"dataPath": "params_shard_2.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.1.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.1.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.1.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.1.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.10.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.10.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.10.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.10.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.10.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "397cafc55ca40192036183358d099274"
},
{
"dataPath": "params_shard_3.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.10.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.11.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.11.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.11.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.11.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.11.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.11.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.12.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.12.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "efbb69b92372f6951c3cc800dc7dc10b"
},
{
"dataPath": "params_shard_4.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.12.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.12.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.12.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.12.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.13.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.13.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.13.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.13.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.13.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "9088e6bd76c248e03d421bf018f75efa"
},
{
"dataPath": "params_shard_5.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.13.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.14.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.14.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.14.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.14.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.14.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.14.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.15.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.15.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "7a69ef249750a9f069bfcf4b9be03c2e"
},
{
"dataPath": "params_shard_6.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.15.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.15.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.15.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.15.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.16.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.16.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.16.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.16.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.16.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "764e0cf62c2966328c0eb4f3e8c864fa"
},
{
"dataPath": "params_shard_7.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.16.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.17.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.17.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.17.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.17.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.17.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.17.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.18.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.18.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "af08efb2d0c28c46ec91467dd68ac361"
},
{
"dataPath": "params_shard_8.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.18.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.18.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.18.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.18.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.19.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.19.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.19.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.19.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.19.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "1f17fcc52b5f5daa546000fde63cddbe"
},
{
"dataPath": "params_shard_9.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.19.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.2.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.2.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.2.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.2.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.2.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.2.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.20.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.20.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "36171e79d7e3df56ff92903f1cd3bea3"
},
{
"dataPath": "params_shard_10.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.20.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.20.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.20.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.20.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.21.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.21.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.21.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.21.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.21.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "ef7a6f039eabff1178996715ac8eeda0"
},
{
"dataPath": "params_shard_11.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.21.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.22.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.22.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.22.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.22.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.22.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.22.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.23.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.23.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "d9a649c7a7fbef5edc5c9d331a9504e0"
},
{
"dataPath": "params_shard_12.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.23.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.23.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.23.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.23.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.24.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.24.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.24.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.24.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.24.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "12fd2a3cba331815232faa25e2f76bbd"
},
{
"dataPath": "params_shard_13.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.24.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.25.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.25.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.25.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.25.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.25.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.25.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.26.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.26.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "8d5e9487ffb9775b7a14cf26e27bd251"
},
{
"dataPath": "params_shard_14.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.26.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.26.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.26.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.26.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.27.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.27.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.27.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.27.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.27.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "fe80334f71e180c122c4cc24c6675835"
},
{
"dataPath": "params_shard_15.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.27.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.28.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.28.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.28.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.28.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.28.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.28.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.29.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.29.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "9dbb293e707b26ce5256589b533dd335"
},
{
"dataPath": "params_shard_16.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.29.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.29.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.29.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.29.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.3.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.3.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.3.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.3.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.3.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "ae7c4f24535aef8841ff0e6e2615872b"
},
{
"dataPath": "params_shard_17.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.3.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.30.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.30.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.30.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.30.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.30.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.30.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.31.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.31.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "fb992b59ad6b0aff109e2027167400bd"
},
{
"dataPath": "params_shard_18.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.31.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.31.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.31.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.31.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.4.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.4.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.4.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.4.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.4.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "617763ed09849f428dc86a125311b4fc"
},
{
"dataPath": "params_shard_19.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.4.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.5.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.5.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.5.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.5.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.5.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.5.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.6.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.6.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "6a7fbc46c6f54ff39c616c1deef3c4aa"
},
{
"dataPath": "params_shard_20.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.6.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.6.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.6.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.6.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.7.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.7.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.7.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.7.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.7.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "ede385b1568d001a41ab667e9f527c4f"
},
{
"dataPath": "params_shard_21.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.7.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.8.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.8.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.8.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.8.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.8.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.8.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.9.input_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.9.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "db03c78ccc57f144e6b1b2ca25c70e08"
},
{
"dataPath": "params_shard_22.bin",
"format": "raw-shard",
"nbytes": 14749440,
"records": [
{
"name": "model.layers.9.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.9.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.9.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.9.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.norm.weight",
"shape": [
960
],
"dtype": "float32",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
}
],
"md5sum": "4a0b8950406ba4ce5b50c109fdcb5283"
}
]
}
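
The manifest above is the shard index that MLC-style weight loaders consume: each `records` entry names a shard file (`dataPath`), its total size and `md5sum`, and the per-tensor `byteOffset`/`nbytes` within it. The following is a minimal Python sketch, not the MLC/TVM loader itself, showing how such a manifest could be read locally to verify a shard and slice out one parameter's stored bytes. Per `"format": "f32-to-bf16"`, the on-disk bytes are bf16-encoded (2 bytes per element), so each record's `nbytes` is half the float32 size implied by `"dtype"`. The folder path is an assumption for illustration.

```python
# Minimal sketch (not the MLC/TVM loader): read ndarray-cache.json, locate one
# named record, verify its shard against the md5sum in the manifest, and return
# the raw on-disk bytes. Assumes ndarray-cache.json and params_shard_*.bin sit
# together in one local folder (hypothetical path below).
import hashlib
import json
from pathlib import Path

cache_dir = Path("SmolLM-360M-Instruct-q0f32-MLC")  # hypothetical local copy
manifest = json.loads((cache_dir / "ndarray-cache.json").read_text())


def load_raw(name: str) -> bytes:
    """Return the stored (bf16-encoded) bytes of a single parameter record."""
    for shard in manifest["records"]:
        for rec in shard["records"]:
            if rec["name"] != name:
                continue
            data = (cache_dir / shard["dataPath"]).read_bytes()
            assert len(data) == shard["nbytes"], "unexpected shard size"
            assert hashlib.md5(data).hexdigest() == shard["md5sum"], "checksum mismatch"
            return data[rec["byteOffset"]: rec["byteOffset"] + rec["nbytes"]]
    raise KeyError(name)


if __name__ == "__main__":
    print(manifest["metadata"])  # ParamSize / ParamBytes / BitsPerParam
    emb = load_raw("model.embed_tokens.weight")
    print(len(emb))  # 94371840 bytes, i.e. 49152 * 960 elements * 2 bytes (bf16)
```

Recovering float32 values from these bytes would require widening each bf16 element back to 4 bytes (bf16 holds the high 16 bits of the float32 pattern); that decoding step is left out of the sketch.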