asahi417 committed
Commit d1642f1
1 parent: d8515f3

add tokenizer

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<mask>": 64000}
bpe.codes ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"normalization": false, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 128, "name_or_path": "cner_output/model/baseline_2021/bertweet_base_continuous/best_model", "special_tokens_map_file": "/home/c.c2042013/.cache/huggingface/transformers/bb0c2777b6eb6a05be4dc2b2adf35e25a8a273a43015576079f4b979bc928f57.0dc5b1041f62041ebbd23b1297f2f573769d5c97d8b7c28180ec86b8f6185aa8", "tokenizer_class": "BertweetTokenizer"}
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
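Taken together, these files define a BertweetTokenizer that the transformers library can load as a unit. A minimal sketch of loading and inspecting it, assuming transformers is installed and the files sit in a local directory or a Hugging Face repo (the path below is a placeholder, not the actual repo id):

```python
from transformers import AutoTokenizer

# Placeholder path: point this at the directory (or repo id) that holds
# added_tokens.json, bpe.codes, special_tokens_map.json,
# tokenizer_config.json, and vocab.txt.
tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")

# Special tokens come from special_tokens_map.json; the mask token id
# comes from the added_tokens.json entry ({"<mask>": 64000}).
print(tokenizer.mask_token)        # <mask>
print(tokenizer.mask_token_id)     # 64000
print(tokenizer.model_max_length)  # 128, set in tokenizer_config.json

# tokenizer_config.json sets "normalization": false, so text is encoded
# with the BPE merges in bpe.codes and the vocabulary in vocab.txt,
# without BERTweet's tweet-normalization step.
encoded = tokenizer("hello <mask>")
print(encoded["input_ids"])
```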