ekojs committed
Commit 1f96903
1 Parent(s): 8be775a

Update tokenizer_config.json

Files changed (1):
  1. tokenizer_config.json (+56 -11)
tokenizer_config.json CHANGED
@@ -25,24 +25,69 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "92538": {
+      "content": "<|plugin|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "92539": {
+      "content": "<|interpreter|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "92540": {
+      "content": "<|action_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "92541": {
+      "content": "<|action_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "92542": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "92543": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "auto_map": {
     "AutoTokenizer": [
-      "internlm/internlm2-20b--tokenization_internlm.InternLMTokenizer",
-      null
+      "LlamaTokenizer",
+      "LlamaTokenizerFast"
     ]
   },
   "bos_token": "<s>",
-  "clean_up_tokenization_spaces": true,
+  "clean_up_tokenization_spaces": false,
+  "decode_with_prefix_space": false,
   "eos_token": "</s>",
-  "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "</s>",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "trust_remote_code": false,
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
+  "sp_model_kwargs": null,
+  "tokenizer_class": "LlamaTokenizerFast",
+  "unk_token": "<unk>"
+}
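
With this change the tokenizer is declared as a stock LlamaTokenizer / LlamaTokenizerFast (no remote code class from internlm/internlm2-20b), and the chat and tool tokens <|plugin|>, <|interpreter|>, <|action_end|>, <|action_start|>, <|im_end|>, <|im_start|> are registered as special tokens at ids 92538-92543. The sketch below (not part of the commit) shows one way to verify the updated config with transformers; "path/to/this/repo" is a placeholder for the actual model repository.

# Minimal sketch: load the tokenizer and check the added special tokens.
# "path/to/this/repo" is a placeholder, not a real repository id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")

# The added special tokens should resolve to the ids introduced in this diff
# (92538..92543 per the added_tokens_decoder entries above).
for token in ("<|plugin|>", "<|interpreter|>", "<|action_end|>",
              "<|action_start|>", "<|im_end|>", "<|im_start|>"):
    print(token, tokenizer.convert_tokens_to_ids(token))

# With tokenizer_class set to LlamaTokenizerFast and auto_map pointing at
# LlamaTokenizer / LlamaTokenizerFast, no trust_remote_code is required.
print(type(tokenizer).__name__)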