{
  "architectures": [
    "BloomForCausalLM"
  ],
  "vocab_size": 50257,
  "hidden_size": 4096,
  "tie_word_embeddings": true,
  "n_layer": 30,
  "hidden_dropout": 0.0,
  "layer_norm_epsilon": 1e-05,
  "n_head": 32,
  "attention_dropout": 0.0,
  "model_type": "bloom"
}
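As a minimal sketch of how a config file like the one above can be loaded with the Hugging Face transformers library (the local path "config.json" is an assumption about where the file has been saved):

```python
# Sketch: read the BLOOM-style config shown above into a BloomConfig object.
# "config.json" is a hypothetical local path; adjust it to wherever the file lives.
from transformers import BloomConfig

config = BloomConfig.from_json_file("config.json")

print(config.model_type)   # "bloom"
print(config.hidden_size)  # 4096
print(config.n_layer)      # 30
print(config.n_head)       # 32
```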