olm-chat-7b/open_lm/model_configs/open_lm_1b_old.json
{
"hidden_dim": 2048,
"n_layers": 24,
"n_heads": 16,
"seq_len": 2048,
"vocab_size": 50432,
"post_embed_norm": false,
"weight_tying": false,
"qk_norm": false,
"ffn_type": "swiglu",
"model_norm": "default_layer_norm",
"positional_embedding_type": "head_rotary"
}
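For reference, here is a minimal Python sketch of loading this config and sanity-checking it. The OpenLMConfig dataclass below is a hypothetical stand-in for illustration, not open_lm's actual config class; only the field names come from the JSON above.

    import json
    from dataclasses import dataclass

    # Hypothetical mirror of the JSON fields; open_lm's real loader may differ.
    @dataclass
    class OpenLMConfig:
        hidden_dim: int
        n_layers: int
        n_heads: int
        seq_len: int
        vocab_size: int
        post_embed_norm: bool
        weight_tying: bool
        qk_norm: bool
        ffn_type: str
        model_norm: str
        positional_embedding_type: str

    with open("open_lm/model_configs/open_lm_1b_old.json") as f:
        cfg = OpenLMConfig(**json.load(f))

    # The per-head dimension must divide evenly: 2048 / 16 = 128.
    assert cfg.hidden_dim % cfg.n_heads == 0
    print(cfg)

Loading through a typed dataclass rather than a raw dict catches missing or misspelled keys at construction time, which is useful when maintaining several near-identical config files like this one.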