lu-vae committed
Commit 9ffff7c
1 parent: bd7328f

Upload tokenizer

qwen.tiktoken ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {}
tokenizer_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "added_tokens_decoder": {},
+   "additional_special_tokens": [],
+   "auto_map": {
+     "AutoTokenizer": [
+       "Qwen/Qwen-14B--tokenization_qwen.QWenTokenizer",
+       null
+     ]
+   },
+   "clean_up_tokenization_spaces": true,
+   "model_max_length": 8192,
+   "tokenizer_class": "QWenTokenizer",
+   "tokenizer_file": null
+ }
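
For reference, a minimal sketch of loading the uploaded tokenizer with the transformers library. The repo id below is a hypothetical placeholder for this repository; trust_remote_code=True is needed because auto_map points at Qwen's remote tokenization_qwen.QWenTokenizer rather than a built-in tokenizer class.

from transformers import AutoTokenizer

# Hypothetical repo id; substitute this repository's actual id.
repo_id = "lu-vae/your-repo"

# auto_map in tokenizer_config.json resolves to Qwen's custom QWenTokenizer,
# so remote code must be trusted for AutoTokenizer to import tokenization_qwen.
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)

ids = tokenizer("Hello, world!")["input_ids"]
print(ids)
print(tokenizer.decode(ids))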