uer committed on
Commit 3cc2614
1 Parent(s): 5a9b853

Create config.json

Files changed (1)
  1. config.json +28 -0
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "GPT2LMHeadModel"
+   ],
+   "attn_pdrop": 0.1,
+   "embd_pdrop": 0.1,
+   "gradient_checkpointing": false,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "gpt2",
+   "n_ctx": 1024,
+   "n_embd": 1024,
+   "n_head": 16,
+   "n_inner": null,
+   "n_layer": 24,
+   "n_positions": 1024,
+   "output_past": true,
+   "resid_pdrop": 0.1,
+   "task_specific_params": {
+     "text-generation": {
+       "do_sample": true,
+       "max_length": 320
+     }
+   },
+   "tokenizer_class": "BertTokenizer",
+   "vocab_size": 21128
+ }
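
This config pairs a GPT2LMHeadModel (24 layers, 16 heads, 1024-dim embeddings) with a BertTokenizer and a 21128-entry vocabulary, a combination typical of Chinese GPT-2 checkpoints. Below is a minimal loading sketch; the repo id is a placeholder assumption, not something stated in this commit.

# Minimal sketch: load a checkpoint that ships this config.
# NOTE: repo_id is a hypothetical placeholder; substitute the actual model repo.
from transformers import BertTokenizer, GPT2LMHeadModel, TextGenerationPipeline

repo_id = "uer/gpt2-chinese-example"  # assumption, not from this commit

# config.json sets "tokenizer_class": "BertTokenizer", so load that class
# instead of the usual GPT-2 BPE tokenizer.
tokenizer = BertTokenizer.from_pretrained(repo_id)
model = GPT2LMHeadModel.from_pretrained(repo_id)

# The text-generation pipeline picks up task_specific_params["text-generation"]
# (do_sample=True, max_length=320) from the config as generation defaults,
# which explicit arguments can still override.
generator = TextGenerationPipeline(model=model, tokenizer=tokenizer)
print(generator("你好", max_length=50))

The per-call max_length=50 here overrides the config's default of 320; omitting it lets the task_specific_params defaults apply.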