HebrewGPT-296M / config.json
{
  "architectures": ["HebrewGPTForCausalLM"],
  "model_type": "hebrew-gpt",
  "auto_map": {
    "AutoConfig": "configuration_hebrewgpt.HebrewGPTConfig",
    "AutoModel": "modeling_hebrewgpt.HebrewGPTForCausalLM",
    "AutoModelForCausalLM": "modeling_hebrewgpt.HebrewGPTForCausalLM"
  },
  "vocab_size": 8192,
  "hidden_size": 1536,
  "num_hidden_layers": 10,
  "num_attention_heads": 12,
  "head_dim": 128,
  "intermediate_size": 4096,
  "max_position_embeddings": 512,
  "dropout": 0.0,
  "rope_theta": 10000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16"
}
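
Because `auto_map` points at custom modeling code shipped alongside this config (`configuration_hebrewgpt.HebrewGPTConfig` and `modeling_hebrewgpt.HebrewGPTForCausalLM`), loading through `transformers` requires `trust_remote_code=True`. Below is a minimal loading sketch; the repo id `ronnengmail/HebrewGPT-296M` is an assumption inferred from this page, and the tokenizer lines assume the repo also ships a tokenizer, which this config file alone does not confirm.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed repo id, inferred from the page header (not stated in the config itself).
repo_id = "ronnengmail/HebrewGPT-296M"

# auto_map resolves to custom code in the repo (configuration_hebrewgpt.py /
# modeling_hebrewgpt.py), so trust_remote_code=True is required.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
)

# Sanity check against the parameter count suggested by the model name.
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")

# Assuming a tokenizer is included in the repo:
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
inputs = tokenizer("שלום עולם", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

For reference, the attention dimensions in the config are self-consistent: `hidden_size` 1536 split across 12 attention heads gives exactly the stated `head_dim` of 128, and `tie_word_embeddings: true` means the output head shares weights with the 8192 x 1536 token embedding.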