{
  "clean_up_tokenization_spaces": true,
  "model_max_length": 1000000000000000019884624838656,
  "special_tokens_map_file": "/content/tokenizer/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
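
For context, a minimal sketch of loading a tokenizer from this config with Hugging Face Transformers. The directory path /content/tokenizer is taken from the special_tokens_map_file entry above and is assumed to also contain the companion files (tokenizer.json, special_tokens_map.json):

```python
# A minimal sketch, assuming the config above lives at /content/tokenizer
# alongside tokenizer.json and special_tokens_map.json.
from transformers import AutoTokenizer

# AutoTokenizer reads tokenizer_config.json and instantiates the class it
# names (PreTrainedTokenizerFast in this case).
tokenizer = AutoTokenizer.from_pretrained("/content/tokenizer")

# model_max_length here is the library's "no limit set" sentinel, int(1e30),
# which prints as the long integer in the config; nothing is truncated
# unless truncation is requested explicitly.
print(tokenizer("hello world").input_ids)
```

The oversized model_max_length is not a bug: Transformers writes int(1e30) (rendered as 1000000000000000019884624838656 after float conversion) when no maximum sequence length was configured for the tokenizer.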