helboukkouri committed on
Commit
fd424b1
1 Parent(s): 915ce79

Add Tokenizer config & Update model config

Browse files
Files changed (4) hide show
  1. config.json +36 -0
  2. special_tokens_map.json +1 -0
  3. tokenizer_config.json +1 -0
  4. vocab.txt +0 -0
config.json CHANGED
@@ -3,6 +3,38 @@
3
  "CharacterBertModel"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  "hidden_act": "gelu",
7
  "hidden_dropout_prob": 0.1,
8
  "hidden_size": 768,
@@ -10,8 +42,12 @@
10
  "intermediate_size": 3072,
11
  "layer_norm_eps": 1e-12,
12
  "max_position_embeddings": 512,
 
 
13
  "model_type": "character_bert",
14
  "num_attention_heads": 12,
15
  "num_hidden_layers": 12,
 
 
16
  "type_vocab_size": 2
17
  }
 
3
  "CharacterBertModel"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
+ "character_embeddings_dim": 16,
7
+ "cnn_activation": "relu",
8
+ "cnn_filters": [
9
+ [
10
+ 1,
11
+ 32
12
+ ],
13
+ [
14
+ 2,
15
+ 32
16
+ ],
17
+ [
18
+ 3,
19
+ 64
20
+ ],
21
+ [
22
+ 4,
23
+ 128
24
+ ],
25
+ [
26
+ 5,
27
+ 256
28
+ ],
29
+ [
30
+ 6,
31
+ 512
32
+ ],
33
+ [
34
+ 7,
35
+ 1024
36
+ ]
37
+ ],
38
  "hidden_act": "gelu",
39
  "hidden_dropout_prob": 0.1,
40
  "hidden_size": 768,
 
42
  "intermediate_size": 3072,
43
  "layer_norm_eps": 1e-12,
44
  "max_position_embeddings": 512,
45
+ "max_word_length": 50,
46
+ "mlm_vocab_size": 100000,
47
  "model_type": "character_bert",
48
  "num_attention_heads": 12,
49
  "num_hidden_layers": 12,
50
+ "num_highway_layers": 2,
51
+ "tie_word_embeddings": false,
52
  "type_vocab_size": 2
53
  }
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"max_word_length": 50, "do_lower_case": true, "do_basic_tokenize": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
vocab.txt ADDED
File without changes