Text Generation · Transformers · Safetensors · imp_phi3 · conversational · custom_code
Commit a4dc48f
Committed by Oyoy1235
Parent: c2a58eb

update tokenizer bugs

special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "</s>",
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -3,15 +3,6 @@
   "truncation": null,
   "padding": null,
   "added_tokens": [
-    {
-      "id": 32011,
-      "content": "<image>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
     {
       "id": 0,
       "content": "<unk>",
@@ -137,6 +128,15 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 32011,
+      "content": "<image>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": false
     }
   ],
   "normalizer": {
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
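
tokenizer.model is tracked with Git LFS, so the commit records only the three-line pointer above; the actual blob (499,723 bytes) lives in LFS storage. A small sketch, again with a placeholder repo id, that downloads the resolved file and checks it against the pointer's oid and size:

```python
import hashlib
from huggingface_hub import hf_hub_download

repo_id = "Oyoy1235/imp_phi3"  # placeholder repo id

# hf_hub_download resolves the LFS pointer and returns a local path to the real blob
path = hf_hub_download(repo_id=repo_id, filename="tokenizer.model")

with open(path, "rb") as f:
    blob = f.read()

# Both values should match the pointer committed above
print(len(blob))                         # expected: 499723
print(hashlib.sha256(blob).hexdigest())  # expected: 9e556afd4421...818d347
```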
tokenizer_config.json CHANGED
@@ -2,14 +2,6 @@
   "add_bos_token": true,
   "add_eos_token": false,
   "added_tokens_decoder": {
-    "32011": {
-      "content": "<image>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
     "0": {
       "content": "<unk>",
       "lstrip": false,
@@ -121,12 +113,20 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32011": {
+      "content": "<image>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
     }
   },
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
+  "eos_token": "<|endoftext|>",
   "legacy": false,
   "model_max_length": 4096,
   "pad_token": "<|endoftext|>",