asahi417 committed
Commit ede9bf7
Parent: 044a4f8

add tokenizer

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<mask>": 64000}
bpe.codes ADDED
The diff for this file is too large to render.
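
bpe.codes holds the fastBPE merge rules that BertweetTokenizer applies; words outside the vocabulary are split into subword units, with `@@` marking non-final pieces. A quick illustration, reusing the tokenizer loaded above (the exact split depends on the learned merges):

```python
# Out-of-vocabulary words are broken into BPE subwords; non-final
# pieces carry the "@@" continuation marker used by fastBPE.
print(tokenizer.tokenize("unbelievable"))
# e.g. ['un@@', 'believ@@', 'able'] -- the actual split depends on bpe.codes
```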
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"normalization": false, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 128, "special_tokens_map_file": null, "tokenizer_file": "/home/asahi/.cache/huggingface/transformers/549c8736388597bc4ccbbca97be7bc091a27b816ae34580c9c2b8dc43f33a8c4.12538392330240b275346001c954eab08a351a0c0f8d2757c7ef9c8e555dfbe0", "name_or_path": "cner_output/model/random_split/bertweet_base/best_model", "tokenizer_class": "BertweetTokenizer"}
vocab.txt ADDED
The diff for this file is too large to render.