File size: 425 Bytes
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "full_tokenizer_file": null,
  "model_max_length": 1000000000000000019884624838656,
  "name_or_path": "pierreguillou/gpt2-small-portuguese",
  "pad_token": "<|endoftext|>",
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
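The fields above describe a GPT2Tokenizer whose special tokens (bos, eos, unk, pad) all reuse "<|endoftext|>" and whose model_max_length is effectively unbounded. A minimal sketch of loading and using this tokenizer with the Hugging Face transformers library (assumed installed); the repository id is taken from the "name_or_path" field, and the sample sentence is illustrative only:

```python
from transformers import GPT2Tokenizer

# Load the tokenizer whose configuration is shown above.
tokenizer = GPT2Tokenizer.from_pretrained("pierreguillou/gpt2-small-portuguese")

# Encode a short Portuguese sentence and decode it back.
enc = tokenizer("Tokenização em português com GPT-2.")
print(enc["input_ids"])
print(tokenizer.decode(enc["input_ids"]))

# pad_token is "<|endoftext|>" in the config, so padded batches simply
# repeat the end-of-text token id.
print(tokenizer.pad_token, tokenizer.pad_token_id)
```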