danielhanchen committed
Commit: 8dee568
Parent: 09d417d

Upload tokenizer

special_tokens_map.json CHANGED
@@ -18,13 +18,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "<unk>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -134,6 +134,7 @@
     "end_of_word_suffix": null,
     "fuse_unk": true,
     "byte_fallback": true,
+    "ignore_merges": false,
     "vocab": {
       "<unk>": 0,
       "<s>": 1,
tokenizer_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -37,9 +38,9 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
-  "model_max_length": 4096,
-  "pad_token": "</s>",
-  "padding_side": "right",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "padding_side": "left",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",