ariesutiono committed
Commit 2c6c6fc
1 Parent(s): a480337

End of training

added_tokens.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "''": 30522,
+   "``": 30523,
+   "fw": 30524,
+   "jjr": 30525,
+   "jjs": 30526,
+   "ls": 30527,
+   "nn": 30528,
+   "nnp": 30529,
+   "nnps": 30530,
+   "nns": 30531,
+   "nn|sym": 30532,
+   "pdt": 30533,
+   "pos": 30534,
+   "prp": 30535,
+   "prp$": 30536,
+   "rbr": 30537,
+   "rbs": 30538,
+   "rp": 30539,
+   "sym": 30540,
+   "vb": 30541,
+   "vbd": 30542,
+   "vbg": 30543,
+   "vbn": 30544,
+   "vbp": 30545,
+   "vbz": 30546,
+   "wdt": 30547,
+   "wp": 30548,
+   "wp$": 30549,
+   "wrb": 30550
+ }
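
The 29 additions appear to be lowercased Penn Treebank part-of-speech tags, appended directly after bert-base-uncased's base vocabulary (ids 0–30521). A minimal sketch of how such a file is typically produced, assuming the tokens were registered with `tokenizer.add_tokens` in the order listed (the sketch itself is not part of this commit):

```python
# Sketch (assumption): extend bert-base-uncased with the POS-tag tokens above.
# New tokens get consecutive ids starting at 30522, matching added_tokens.json.
from transformers import BertTokenizer

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
pos_tags = [
    "''", "``", "fw", "jjr", "jjs", "ls", "nn", "nnp", "nnps", "nns",
    "nn|sym", "pdt", "pos", "prp", "prp$", "rbr", "rbs", "rp", "sym",
    "vb", "vbd", "vbg", "vbn", "vbp", "vbz", "wdt", "wp", "wp$", "wrb",
]
num_added = tokenizer.add_tokens(pos_tags)    # skips tokens already in the vocab
print(num_added)                              # 29
print(tokenizer.convert_tokens_to_ids("nn"))  # 30528, as in the file above

# save_pretrained writes the four files in this commit:
# added_tokens.json, special_tokens_map.json, tokenizer_config.json, vocab.txt
tokenizer.save_pretrained("./checkpoint")     # directory name is hypothetical
```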
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
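
These are the stock BERT special tokens, unchanged from the base model. The same mapping is exposed at runtime (a hypothetical check, not part of the commit):

```python
from transformers import BertTokenizer

tok = BertTokenizer.from_pretrained("bert-base-uncased")
# special_tokens_map mirrors the JSON above
print(tok.special_tokens_map)
# {'unk_token': '[UNK]', 'sep_token': '[SEP]', 'pad_token': '[PAD]',
#  'cls_token': '[CLS]', 'mask_token': '[MASK]'}
```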
tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "name_or_path": "bert-base-uncased",
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "special_tokens_map_file": null,
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
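
The config pins the tokenizer class to BertTokenizer with lowercasing and a 512-token limit. A short sketch of loading the files committed here (the local directory name is hypothetical):

```python
from transformers import AutoTokenizer

# Load from a directory containing the four files in this commit.
tok = AutoTokenizer.from_pretrained("./checkpoint", use_fast=False)

print(tok.model_max_length)        # 512, from tokenizer_config.json
print(len(tok))                    # 30551 = 30522 base + 29 added tokens
print(tok.tokenize("the nn tag"))  # ['the', 'nn', 'tag'];
                                   # 'nn' resolves to added-token id 30528
```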
vocab.txt ADDED
The diff for this file is too large to render. See raw diff