{
  "model_max_length": 512,
  "name_or_path": "/global/scratch/users/aniketh/PromoGen/K562_SentencePieceUnigramTokenizer_4096_log_bins_fast",
  "special_tokens": [
    "<BOS>",
    "<EOS>",
    "<PAD>",
    "<UNK>",
    "<CLS>",
    "<SEP>",
    "<MASK>"
  ],
  "special_tokens_map_file": "/global/scratch/users/aniketh/PromoGen/K562_SentencePieceUnigramTokenizer_4096_log_bins_fast/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
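
A minimal loading sketch, assuming this file is the tokenizer_config.json inside the tokenizer directory named in name_or_path (the path is taken from the config above and may not exist on your machine):

```python
# Sketch: load the fast tokenizer described by this config.
# Assumption: the directory also contains tokenizer.json and
# special_tokens_map.json, as referenced by the config.
from transformers import PreTrainedTokenizerFast

tokenizer = PreTrainedTokenizerFast.from_pretrained(
    "/global/scratch/users/aniketh/PromoGen/K562_SentencePieceUnigramTokenizer_4096_log_bins_fast"
)

print(tokenizer.model_max_length)    # 512, per model_max_length above
print(tokenizer.special_tokens_map)  # <BOS>, <EOS>, <PAD>, <UNK>, <CLS>, <SEP>, <MASK>
```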