stanleychu2 committed
Commit: dd2d3b4
Parent: dc93870

add tokenizer

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<end>": 54944, "<task>": 54945, "<chat>": 54946}
merges.txt ADDED
The diff for this file is too large to render.
 
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "__start__", "eos_token": "<end>", "unk_token": "__unk__", "pad_token": "__null__", "additional_special_tokens": ["<task>", "<chat>"]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "__unk__", "bos_token": "__start__", "eos_token": "<end>", "pad_token": "__null__", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./output/blenderbot_user_simulator/checkpoint-39504", "model_max_length": 512, "tokenizer_class": "BlenderbotSmallTokenizer"}
vocab.json ADDED
The diff for this file is too large to render.
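
A minimal sketch (not part of the commit) of how the files added here are consumed by the transformers library; the local path "tokenizer_dir" is a placeholder for a checkout containing these five files, and the commented-out values are the ones recorded in the JSON above.

```python
from transformers import BlenderbotSmallTokenizer  # class named in tokenizer_config.json

# Loads vocab.json, merges.txt, added_tokens.json, special_tokens_map.json,
# and tokenizer_config.json from the given directory (placeholder path).
tokenizer = BlenderbotSmallTokenizer.from_pretrained("tokenizer_dir")

# Tokens registered in added_tokens.json resolve to the ids recorded there.
print(tokenizer.convert_tokens_to_ids(["<end>", "<task>", "<chat>"]))
# expected, per added_tokens.json: [54944, 54945, 54946]

# special_tokens_map.json wires "<end>" in as the eos token and keeps
# "<task>" / "<chat>" as additional special tokens.
print(tokenizer.eos_token)                  # "<end>"
print(tokenizer.additional_special_tokens)  # ["<task>", "<chat>"]

# tokenizer_config.json caps the sequence length.
print(tokenizer.model_max_length)           # 512
```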