{
  "model_type": "llama",
  "vocab_size": 32000,
  "hidden_size": 4096,
  "num_attention_heads": 32,
  "num_hidden_layers": 24,
  "intermediate_size": 11008,
  "max_position_embeddings": 2048,
  "use_cache": true,
"layer_norm_epsilon": 1e-5, | |
"activation_function": "gelu_new", | |
"rotary_dim": 64, | |
"quantization": { | |
"format": "gguf", | |
"bits": 4 | |
} | |
} | |
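
A minimal sketch of how a config like this might be consumed, assuming it is saved as config.json (a hypothetical filename; adjust the path to your layout). The script loads the JSON and checks the one invariant these fields imply: hidden_size must divide evenly across num_attention_heads, which yields the per-head dimension.

import json

# Load the config shown above; "config.json" is an assumed filename.
with open("config.json") as f:
    cfg = json.load(f)

# Sanity check implied by the fields: each attention head gets an equal
# slice of the hidden dimension (4096 / 32 = 128 here).
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

print(f"model_type={cfg['model_type']}, layers={cfg['num_hidden_layers']}, head_dim={head_dim}")
print(f"quantization: {cfg['quantization']['format']} @ {cfg['quantization']['bits']}-bit")

The assert guards against the most common hand-editing mistake: changing hidden_size or num_attention_heads without keeping them divisible.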