{
	"bos_token_id": 50256,
	"eos_token_ids": [
	50256
	  ],
	"initializer_range": 0.02,
	"layer_norm_epsilon": 1e-05,
	"model_type": "gpt2",
	"n_ctx": 1024,
	"n_embd": 1280,
	"n_head": 20,
	"n_layer": 36,
	"n_positions": 1024,
	"pad_token_id": 50256,
	"vocab_size": 50257
}
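
This is a Hugging Face config.json for a GPT-2 model; the hyperparameters (36 layers, 1280-dim embeddings, 20 attention heads) match the GPT-2 Large checkpoint. Below is a minimal sketch, not part of the original file, of loading the config with the transformers library and sanity-checking the implied parameter count; it assumes the JSON above is saved locally as config.json (a hypothetical path) and that transformers is installed.

# A minimal sketch: assumes the JSON above is saved as ./config.json
# (hypothetical path) and the `transformers` library is available.
from transformers import GPT2Config

config = GPT2Config.from_json_file("config.json")

# Back-of-the-envelope parameter count from the config alone:
# token + position embeddings, plus 12 * n_embd^2 weights per block
# (3*n_embd^2 for QKV, n_embd^2 for the output projection, and
# 8*n_embd^2 for the MLP), ignoring biases and layer norms.
n = config.n_embd
total = (config.vocab_size * n             # token embeddings
         + config.n_positions * n          # position embeddings
         + config.n_layer * 12 * n * n)    # transformer blocks
print(f"~{total / 1e6:.0f}M parameters")   # ~773M, consistent with GPT-2 Large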