{
  "activation": "gelu",
  "bias": false,
  "d_model": 1536,
  "dff": null,
  "dropout_rate": 0.0,
  "max_block_size": 1024,
  "n_heads": 24,
  "n_layers": 24,
  "norm_first": true,
  "pos_enc_type": "RoPE",
  "use_flash_attention": true,
  "vocab_size": 50304
}