ZWJYYC committed on
Commit dbb745e
Parent: ad6ac1b

Upload config.json

Files changed (1)
  1. config.json +45 -0
config.json ADDED
@@ -0,0 +1,45 @@
+ {
+   "attention_probs_dropout_prob": 0.1,
+   "cell": {},
+   "emb_size": 564,
+   "fix_config": {
+     "sample_hidden_size": 564,
+     "sample_intermediate_sizes": [
+       1024,
+       1024,
+       1024,
+       1024,
+       1024
+     ],
+     "sample_layer_num": 5,
+     "sample_num_attention_heads": [
+       12,
+       12,
+       12,
+       12,
+       12
+     ],
+     "sample_qkv_sizes": [
+       528,
+       528,
+       528,
+       528,
+       528
+     ]
+   },
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 564,
+   "initializer_range": 0.02,
+   "intermediate_size": 1024,
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 5,
+   "pre_trained": "",
+   "qkv_size": 528,
+   "structure": [],
+   "training": "",
+   "type_vocab_size": 2,
+   "vocab_size": 30522
+ }
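
For reference, a minimal sketch of how the uploaded file could be inspected with plain Python. The path "config.json" and the per-head arithmetic are assumptions for illustration; the model class that actually consumes fields such as fix_config and qkv_size is not part of this commit.

    import json

    # Load the uploaded config (path is illustrative; point it at the downloaded file).
    with open("config.json") as f:
        cfg = json.load(f)

    # Top-level architecture hyperparameters from the file.
    print("layers:           ", cfg["num_hidden_layers"])       # 5
    print("hidden size:      ", cfg["hidden_size"])             # 564
    print("intermediate size:", cfg["intermediate_size"])       # 1024
    print("attention heads:  ", cfg["num_attention_heads"])     # 12

    # qkv_size is listed separately from hidden_size (528 vs. 564),
    # so the per-head width here follows from qkv_size.
    head_dim = cfg["qkv_size"] // cfg["num_attention_heads"]
    print("per-head dim:     ", head_dim)                       # 528 / 12 = 44

    # fix_config repeats one choice per layer; the lists line up with sample_layer_num.
    fix = cfg["fix_config"]
    assert len(fix["sample_intermediate_sizes"]) == fix["sample_layer_num"]
    assert len(fix["sample_qkv_sizes"]) == fix["sample_layer_num"]

Note that qkv_size (528) is smaller than hidden_size (564), so the query/key/value projections are narrower than the hidden states: 12 heads of 44 dimensions each, as the sketch computes.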