PreTrainedTokenizer(name_or_path='', vocab_size=47, model_max_len=1000000000000000019884624838656, is_fast=False, padding_side='right', special_tokens={'bos_token': '', 'eos_token': '', 'unk_token': '[UNK]', 'pad_token': '[PAD]'})
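This is the printed repr of a Hugging Face transformers "slow" tokenizer (`is_fast=False`): an object with a 47-symbol vocabulary, `[UNK]`/`[PAD]` special tokens, empty `bos`/`eos` tokens, and no `model_max_length` set (the huge `model_max_len` value is transformers' `int(1e30)` sentinel for "no limit"). The sketch below shows one way such output could arise; it is an assumption, not the original code. The class name `CharTokenizer` and the exact 47-character vocabulary are hypothetical, and only `vocab_size=47` plus the special-tokens map come from the output above.

```python
# A minimal sketch of a setup that prints a repr like the one above.
# Hypothetical: a character-level subclass of PreTrainedTokenizer with a
# 47-symbol vocabulary. Only vocab_size=47 and the special tokens are
# taken from the output; everything else is illustrative.
from transformers import PreTrainedTokenizer


class CharTokenizer(PreTrainedTokenizer):
    """Character-level slow tokenizer (hence is_fast=False in the repr)."""

    def __init__(self, vocab, **kwargs):
        # Build the lookup tables before calling super().__init__(), which
        # may consult get_vocab() while registering special tokens.
        self._token_to_id = {tok: i for i, tok in enumerate(vocab)}
        self._id_to_token = {i: tok for tok, i in self._token_to_id.items()}
        super().__init__(
            unk_token="[UNK]",
            pad_token="[PAD]",
            bos_token="",  # empty string, as shown in the repr above
            eos_token="",  # empty string, as shown in the repr above
            **kwargs,
        )

    @property
    def vocab_size(self):
        return len(self._token_to_id)

    def get_vocab(self):
        return dict(self._token_to_id)

    def _tokenize(self, text):
        # One token per character.
        return list(text)

    def _convert_token_to_id(self, token):
        return self._token_to_id.get(token, self._token_to_id["[UNK]"])

    def _convert_id_to_token(self, index):
        return self._id_to_token.get(index, "[UNK]")


# Hypothetical 47-symbol vocabulary: 2 special tokens + 45 characters.
vocab = ["[UNK]", "[PAD]"] + list("abcdefghijklmnopqrstuvwxyz0123456789 .,!?'-:;")
assert len(vocab) == 47

tokenizer = CharTokenizer(vocab)
print(tokenizer)
# Prints something close to the repr above. Exact field names vary with the
# installed transformers version (e.g. older releases print model_max_len,
# newer ones model_max_length), and name_or_path is '' because the tokenizer
# was built in memory rather than loaded with from_pretrained().
```

Once constructed, the tokenizer behaves like any other slow tokenizer: `tokenizer("hello!")` splits the text into characters, maps them through the 47-entry vocabulary, and falls back to `[UNK]` for anything outside it.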