Guilherme34 committed on
Commit f9d4363
1 Parent(s): 73c7bc4

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +4 -28
  2. tokenizer_config.json +1 -1
special_tokens_map.json CHANGED
@@ -1,26 +1,8 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "<fake_token_around_image>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<image>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<end_of_utterance>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
+    "<fake_token_around_image>",
+    "<image>",
+    "<end_of_utterance>"
   ],
   "bos_token": {
     "content": "<s>",
@@ -36,13 +18,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -60,7 +60,7 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<unk>",
+  "pad_token": "</s>",
   "processor_class": "Idefics2Processor",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,