system HF staff committed on
Commit
c6f11e6
1 Parent(s): 94b8809

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +101 -0
config.json ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "architectures": [
    "MobileBertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_activation": false,
  "embedding_size": 128,
  "hidden_act": "relu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 512,
  "id2label": {
    "0": "O",
    "1": "CC",
    "2": "CD",
    "3": "DT",
    "4": "EX",
    "5": "FW",
    "6": "IN",
    "7": "JJ",
    "8": "JJR",
    "9": "JJS",
    "10": "MD",
    "11": "NN",
    "12": "NNP",
    "13": "NNPS",
    "14": "NNS",
    "15": "PDT",
    "16": "POS",
    "17": "PRP",
    "18": "RB",
    "19": "RBR",
    "20": "RBS",
    "21": "RP",
    "22": "SYM",
    "23": "TO",
    "24": "UH",
    "25": "VB",
    "26": "VBD",
    "27": "VBG",
    "28": "VBN",
    "29": "VBP",
    "30": "VBZ",
    "31": "WDT",
    "32": "WP",
    "33": "WRB"
  },
  "initializer_range": 0.02,
  "intermediate_size": 512,
  "intra_bottleneck_size": 128,
  "key_query_shared_bottleneck": true,
  "label2id": {
    "CC": 1,
    "CD": 2,
    "DT": 3,
    "EX": 4,
    "FW": 5,
    "IN": 6,
    "JJ": 7,
    "JJR": 8,
    "JJS": 9,
    "MD": 10,
    "NN": 11,
    "NNP": 12,
    "NNPS": 13,
    "NNS": 14,
    "O": 0,
    "PDT": 15,
    "POS": 16,
    "PRP": 17,
    "RB": 18,
    "RBR": 19,
    "RBS": 20,
    "RP": 21,
    "SYM": 22,
    "TO": 23,
    "UH": 24,
    "VB": 25,
    "VBD": 26,
    "VBG": 27,
    "VBN": 28,
    "VBP": 29,
    "VBZ": 30,
    "WDT": 31,
    "WP": 32,
    "WRB": 33
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "mobilebert",
  "normalization_type": "no_norm",
  "num_attention_heads": 4,
  "num_feedforward_networks": 4,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "trigram_input": true,
  "true_hidden_size": 128,
  "type_vocab_size": 2,
  "use_bottleneck": true,
  "use_bottleneck_attention": false,
  "vocab_size": 30522
}