XiangD-OSU committed
Commit: 65dfad7
Parent: 4d1c8cb

add tokenizer

Files changed (3):
  1. special_tokens_map.json +1 -0
  2. tokenizer_config.json +1 -0
  3. vocab.txt +0 -0
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "additional_special_tokens": ["[EMPTY]"]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "do_basic_tokenize": true, "never_split": null, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "empty_token": "[EMPTY]", "tokenize_chinese_chars": true, "strip_accents": null, "cell_trim_length": -1, "max_column_id": null, "max_row_id": null, "strip_column_names": false, "update_answer_coordinates": false, "min_question_length": null, "max_question_length": null, "model_max_length": 512, "additional_special_tokens": ["[EMPTY]"], "drop_rows_to_fit": false, "special_tokens_map_file": "/home/deng.595/.cache/huggingface/transformers/58830cd4530ce7c6523037a273e6d128ea75ede3b7e271ecacf7bec317dc9acb.852c05acd4c087ec9774e7ed56aeea5010c13056cc8bc37594b75b172416592c", "tokenizer_file": null, "name_or_path": "google/tapas-base", "tokenizer_class": "TapasTokenizer"}
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
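Together, these three files are everything TapasTokenizer needs: the config is based on google/tapas-base and sets model_max_length to 512. A minimal loading sketch, assuming the files have been downloaded to the current directory ("." is a stand-in for this repo's Hub id, which is not shown on this page):

import pandas as pd
from transformers import TapasTokenizer

# from_pretrained picks up vocab.txt, tokenizer_config.json, and special_tokens_map.json.
tokenizer = TapasTokenizer.from_pretrained(".")

# TAPAS encodes a table together with natural-language queries; every cell value must be a string.
table = pd.DataFrame({"City": ["Paris", "Columbus"], "Population": ["2100000", "905000"]})
encoding = tokenizer(table=table, queries=["Which city is larger?"],
                     padding="max_length", return_tensors="pt")
print(encoding["input_ids"].shape)  # torch.Size([1, 512]), matching model_max_length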