{"bos_token": "<START>", "eos_token": "<END>", "unk_token": "<UNK>", "special_tokens_map_file": "/home/dracoglacius/StemmlerProject/MIke_GPT2/WordBBPEModels/8_24_Word_Lvl_GPT2/8_24_fast_word_lvl_tokenizer_noestart/special_tokens_map.json", "name_or_path": "/home/dracoglacius/StemmlerProject/MIke_GPT2/WordBBPEModels/8_24_Word_Lvl_GPT2/8_24_fast_word_lvl_tokenizer_noestart", "tokenizer_class": "PreTrainedTokenizerFast"}