bigcode_tokenizer / tokenizer_config.json
cakiki's picture
Upload tokenizer
6625ce4
raw
history · blame · contribute · delete
No virus detected
326 Bytes
{
"name_or_path": "cakiki/bigcode_tokenizer",
"special_tokens_map_file": "/home/christopher/.cache/huggingface/hub/models--cakiki--bigcode_tokenizer/snapshots/e96afb14b6c9d15112592b7c41cde4953fcfc189/special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast",
"unk_token": "[UNK]",
"vocab_size": 70000
}