{
  "model_class": "llama2",
  "model_size": "7b",
  "architectures": [
    "Llama2ForCausalLM"
  ],
  "dim": 4096,
  "n_layers": 32,
  "n_heads": 32,
  "n_kv_heads": null,
  "vocab_size": 32000,
  "multiple_of": 256,
  "ffn_dim_multiplier": null,
  "norm_eps": 1e-5,
  "max_batch_size": 32,
  "max_seq_len": 2048,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": -1,
  "torch_dtype": "float16",
  "pretraining_base": "llama2-7b-2t-tokens",
  "model_repo_folder_path": "llama2-7b-base"
}
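
For reference, a minimal sketch of how a config like this could be read into a typed object in Python. The LlamaConfig dataclass, the load_config helper, and the "config.json" filename are illustrative assumptions, not part of this repository; only the architecture fields are mapped, and the remaining keys (token ids, torch_dtype, repo paths) are ignored.

import json
from dataclasses import dataclass
from typing import Optional

@dataclass
class LlamaConfig:
    # Core architecture hyperparameters mirrored from the JSON above.
    dim: int
    n_layers: int
    n_heads: int
    n_kv_heads: Optional[int]
    vocab_size: int
    multiple_of: int
    ffn_dim_multiplier: Optional[float]
    norm_eps: float
    max_batch_size: int
    max_seq_len: int

def load_config(path: str) -> LlamaConfig:
    # Parse the JSON file and keep only the keys the dataclass declares;
    # extra metadata fields are simply dropped.
    with open(path) as f:
        raw = json.load(f)
    known = {k: raw[k] for k in LlamaConfig.__dataclass_fields__ if k in raw}
    return LlamaConfig(**known)

# Example usage (assumed filename):
# cfg = load_config("config.json")
# head_dim = cfg.dim // cfg.n_heads  # 4096 // 32 = 128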