{
  "clean_up_tokenization_spaces": true,
  "model_input_names": [
    "input_ids",
    "attention_mask"
  ],
  "model_max_length": 1000000000000000019884624838656,
  "tokenizer_class": "PreTrainedTokenizerFast"
}