SalmanFaroz committed on
Commit
dd58a33
1 Parent(s): 94b0c70

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +3 -21
  2. tokenizer_config.json +1 -1
special_tokens_map.json CHANGED
@@ -1,26 +1,8 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "<fake_token_around_image>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<image>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<end_of_utterance>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
+    "<fake_token_around_image>",
+    "<image>",
+    "<end_of_utterance>"
   ],
   "bos_token": {
     "content": "<s>",
tokenizer_config.json CHANGED
@@ -58,8 +58,8 @@
   ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
-  "do_image_splitting": true,
   "eos_token": "</s>",
+  "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<unk>",
   "processor_class": "Idefics2Processor",