Bingsu committed
Commit d5a6bb0
1 Parent(s): f7958c8

Upload processor

merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json ADDED
@@ -0,0 +1,19 @@
+{
+  "do_normalize": true,
+  "do_resize": true,
+  "feature_extractor_type": "ViltFeatureExtractor",
+  "image_mean": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "image_std": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "processor_class": "ViltProcessor",
+  "resample": 3,
+  "size": 384,
+  "size_divisor": 32
+}
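
Note: as a quick sanity check of the added config, the processor can be loaded with transformers and its fields inspected. A minimal sketch, assuming the Hub repo id matches the "name_or_path" recorded in tokenizer_config.json below (the actual repo may differ):

    from transformers import ViltProcessor

    # Repo id is an assumption taken from "name_or_path" in tokenizer_config.json.
    processor = ViltProcessor.from_pretrained("Bingsu/my_vilt_b32_mlm_not_trained")

    fe = processor.feature_extractor    # ViltFeatureExtractor, per "feature_extractor_type"
    print(fe.size)                      # 384: shorter edge is resized to 384 px
    print(fe.size_divisor)              # 32: H and W are padded to multiples of 32
    print(fe.resample)                  # 3 == PIL.Image.BICUBIC
    print(fe.image_mean, fe.image_std)  # [0.5, 0.5, 0.5] each, i.e. pixels scaled to [-1, 1]
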
special_tokens_map.json CHANGED
@@ -1,14 +1,6 @@
 {
-  "bos_token": "<s>",
   "cls_token": "<s>",
-  "eos_token": "</s>",
-  "mask_token": {
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
+  "mask_token": "<mask>",
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "unk_token": "<unk>"
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "add_prefix_space": false,
-  "bos_token": "<s>",
   "cls_token": "<s>",
-  "eos_token": "</s>",
-  "errors": "replace",
+  "do_lower_case": true,
   "mask_token": "<mask>",
-  "model_max_length": 40,
-  "name_or_path": "Bingsu/my_vilt_b32_mlm",
+  "model_max_length": 512,
+  "name_or_path": "Bingsu/my_vilt_b32_mlm_not_trained",
   "pad_token": "<pad>",
+  "processor_class": "ViltProcessor",
   "sep_token": "</s>",
-  "special_tokens_map_file": "./special_tokens_map.json",
-  "tokenizer_class": "RobertaTokenizer",
-  "trim_offsets": true,
+  "special_tokens_map_file": "ko_BBPE_tokenizer_bert2\\special_tokens_map.json",
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "trust_remote_code": false,
   "unk_token": "<unk>"
 }
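
Note: the config now requests BertTokenizer instead of RobertaTokenizer, adding BERT-specific options ("do_lower_case", "strip_accents", "tokenize_chinese_chars") and dropping the Roberta/BPE-specific ones ("add_prefix_space", "errors", "trim_offsets"), while "model_max_length" grows from 40 to 512. A minimal sketch, again assuming the hypothetical repo id:

    from transformers import AutoTokenizer

    # Repo id is an assumption taken from "name_or_path" above.
    tok = AutoTokenizer.from_pretrained("Bingsu/my_vilt_b32_mlm_not_trained")
    print(type(tok).__name__)    # BertTokenizerFast (fast variant of BertTokenizer)
    print(tok.model_max_length)  # 512
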
vocab.json CHANGED
The diff for this file is too large to render. See raw diff