Llama-3.2-1B-Instruct-MNN / llm_config.json
{
  "hidden_size": 2048,
  "layer_nums": 16,
  "attention_mask": "float",
  "key_value_shape": [2, 1, 0, 8, 64],
  "prompt_template": "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\n%s<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
  "is_visual": false
}
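
The config is consumed natively by MNN's LLM runtime, but its fields can be inspected directly. Below is a minimal sketch (not the MNN API) that reads this file with Python's standard `json` module and formats a user message into the `%s` placeholder of `prompt_template`; the file path and the example message are assumptions for illustration.

```python
import json

# Minimal sketch: read llm_config.json and build a chat prompt from it.
# Assumes the file is in the current working directory; adjust as needed.
with open("llm_config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Basic model geometry recorded in the config.
print("hidden size:", config["hidden_size"])   # 2048
print("layers:     ", config["layer_nums"])    # 16

# prompt_template embeds the user message at the %s placeholder.
user_message = "What is the capital of France?"  # hypothetical example input
prompt = config["prompt_template"] % user_message
print(prompt)
```

The resulting string wraps the message in the Llama 3.2 chat markers (`<|begin_of_text|>`, `<|start_header_id|>user<|end_header_id|>`, `<|eot_id|>`) and ends with the assistant header, so the model continues generation as the assistant turn.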