MiniGPT_S22/checkpoints/lit_config.json
{"name": "Llama-2-7b-chat-hf", "hf_config": {"org": "meta-llama", "name": "Llama-2-7b-chat-hf"}, "block_size": 4096, "vocab_size": 32000, "padding_multiple": 64, "padded_vocab_size": 32000, "n_layer": 32, "n_head": 32, "n_embd": 4096, "rotary_percentage": 1.0, "parallel_residual": false, "bias": false, "lm_head_bias": false, "n_query_groups": 32, "shared_attention_norm": false, "_norm_class": "RMSNorm", "norm_eps": 1e-05, "_mlp_class": "LLaMAMLP", "gelu_approximate": "none", "intermediate_size": 11008, "rope_condense_ratio": 1, "rope_base": 10000}