royokong committed
Commit 563d8e9
1 Parent(s): c36d472

Upload processor

special_tokens_map.json CHANGED
@@ -13,11 +13,5 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|reserved_special_token_250|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|eot_id|>"
 }
tokenizer.json CHANGED
@@ -2306,6 +2306,15 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 128256,
+      "content": "<image>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": false
     }
   ],
   "normalizer": null,
tokenizer_config.json CHANGED
@@ -2047,6 +2047,14 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "128256": {
+      "content": "<image>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
     }
   },
   "bos_token": "<|begin_of_text|>",
@@ -2058,7 +2066,7 @@
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|reserved_special_token_250|>",
+  "pad_token": "<|eot_id|>",
   "padding_side": "left",
   "processor_class": "LlavaNextProcessor",
   "tokenizer_class": "PreTrainedTokenizerFast"