perlthoughts committed on
Commit
f7ea28d
1 Parent(s): fe032bc

Upload tokenizer

Browse files
Files changed (3) hide show
  1. special_tokens_map.json +5 -11
  2. tokenizer.json +0 -27
  3. tokenizer_config.json +7 -31
special_tokens_map.json CHANGED
@@ -1,7 +1,8 @@
1
  {
2
  "additional_special_tokens": [
3
- "<|end_of_turn|>",
4
- "<|pad_0|>"
 
5
  ],
6
  "bos_token": {
7
  "content": "<s>",
@@ -11,21 +12,14 @@
11
  "single_word": false
12
  },
13
  "eos_token": {
14
- "content": "<|end_of_turn|>",
15
  "lstrip": false,
16
  "normalized": false,
17
  "rstrip": false,
18
  "single_word": false
19
  },
20
  "pad_token": {
21
- "content": "<|end_of_turn|>",
22
- "lstrip": false,
23
- "normalized": false,
24
- "rstrip": false,
25
- "single_word": false
26
- },
27
- "sep_token": {
28
- "content": "<sep>",
29
  "lstrip": false,
30
  "normalized": false,
31
  "rstrip": false,
 
1
  {
2
  "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>"
6
  ],
7
  "bos_token": {
8
  "content": "<s>",
 
12
  "single_word": false
13
  },
14
  "eos_token": {
15
+ "content": "</s>",
16
  "lstrip": false,
17
  "normalized": false,
18
  "rstrip": false,
19
  "single_word": false
20
  },
21
  "pad_token": {
22
+ "content": "</s>",
 
 
 
 
 
 
 
23
  "lstrip": false,
24
  "normalized": false,
25
  "rstrip": false,
tokenizer.json CHANGED
@@ -29,33 +29,6 @@
29
  "rstrip": false,
30
  "normalized": false,
31
  "special": true
32
- },
33
- {
34
- "id": 32000,
35
- "content": "<|end_of_turn|>",
36
- "single_word": false,
37
- "lstrip": false,
38
- "rstrip": false,
39
- "normalized": false,
40
- "special": true
41
- },
42
- {
43
- "id": 32001,
44
- "content": "<|pad_0|>",
45
- "single_word": false,
46
- "lstrip": false,
47
- "rstrip": false,
48
- "normalized": false,
49
- "special": true
50
- },
51
- {
52
- "id": 32002,
53
- "content": "<sep>",
54
- "single_word": false,
55
- "lstrip": false,
56
- "rstrip": false,
57
- "normalized": false,
58
- "special": true
59
  }
60
  ],
61
  "normalizer": {
 
29
  "rstrip": false,
30
  "normalized": false,
31
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  }
33
  ],
34
  "normalizer": {
tokenizer_config.json CHANGED
@@ -23,45 +23,21 @@
23
  "rstrip": false,
24
  "single_word": false,
25
  "special": true
26
- },
27
- "32000": {
28
- "content": "<|end_of_turn|>",
29
- "lstrip": false,
30
- "normalized": false,
31
- "rstrip": false,
32
- "single_word": false,
33
- "special": true
34
- },
35
- "32001": {
36
- "content": "<|pad_0|>",
37
- "lstrip": false,
38
- "normalized": false,
39
- "rstrip": false,
40
- "single_word": false,
41
- "special": true
42
- },
43
- "32002": {
44
- "content": "<sep>",
45
- "lstrip": false,
46
- "normalized": false,
47
- "rstrip": false,
48
- "single_word": false,
49
- "special": true
50
  }
51
  },
52
  "additional_special_tokens": [
53
- "<|end_of_turn|>",
54
- "<|pad_0|>"
 
55
  ],
56
  "bos_token": "<s>",
57
- "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
58
  "clean_up_tokenization_spaces": false,
59
  "device_map": "auto",
60
- "eos_token": "<|end_of_turn|>",
61
  "legacy": true,
62
- "model_max_length": 1000000000000000019884624838656,
63
- "pad_token": "<|end_of_turn|>",
64
- "sep_token": "<sep>",
65
  "sp_model_kwargs": {},
66
  "spaces_between_special_tokens": false,
67
  "tokenizer_class": "LlamaTokenizer",
 
23
  "rstrip": false,
24
  "single_word": false,
25
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  }
27
  },
28
  "additional_special_tokens": [
29
+ "<unk>",
30
+ "<s>",
31
+ "</s>"
32
  ],
33
  "bos_token": "<s>",
34
+ "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
35
  "clean_up_tokenization_spaces": false,
36
  "device_map": "auto",
37
+ "eos_token": "</s>",
38
  "legacy": true,
39
+ "model_max_length": 2048,
40
+ "pad_token": "</s>",
 
41
  "sp_model_kwargs": {},
42
  "spaces_between_special_tokens": false,
43
  "tokenizer_class": "LlamaTokenizer",