{
  "added_tokens_decoder": {
    "0": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "101": {
      "content": "[CLS]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "102": {
      "content": "[SEP]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "103": {
      "content": "[MASK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "do_basic_tokenize": true,
  "do_lower_case": true,
  "mask_token": "[MASK]",
  "max_length": 128,
  "model_max_length": 512,
  "never_split": null,
  "pad_to_multiple_of": null,
  "pad_token": "[PAD]",
  "pad_token_type_id": 0,
  "padding_side": "right",
  "sep_token": "[SEP]",
  "stride": 0,
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "BertTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "[UNK]"
}
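
The JSON above is a standard tokenizer_config.json for a lowercasing BertTokenizer (model_max_length 512; special tokens [PAD]=0, [UNK]=100, [CLS]=101, [SEP]=102, [MASK]=103). As a minimal loading sketch, assuming the file sits in a local directory alongside its companion files such as vocab.txt (the directory name "bert-model" below is a placeholder, not part of the config):

from transformers import AutoTokenizer

# Hypothetical local path; any directory containing this tokenizer_config.json
# plus vocab.txt works, as would a Hugging Face Hub model id.
tokenizer = AutoTokenizer.from_pretrained("bert-model")

# do_lower_case=true lowercases the input; [CLS]/[SEP] (ids 101/102) frame the
# sequence, and [PAD] (id 0) fills the right side up to max_length.
enc = tokenizer("Hello, world!", padding="max_length", max_length=16,
                truncation=True)
print(enc["input_ids"])
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))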