AlexHung29629 committed
Commit e318bea · verified · 1 Parent(s): e599f38

Upload tokenizer

Files changed (1): tokenizer_config.json (+1, -4)
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
 {
-  "add_bos_token": false,
-  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -35,13 +33,12 @@
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
-  "legacy": false,
+  "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<unk>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
-  "tokenizer_file": "/home/ubuntu/disk/cache/hub/models--ocisd4--mistral_tokenizer_ext_dataprep/snapshots/5db4d4b8804943e2962e19882281f1254c454890/tokenizer.json",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
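
For reference, a minimal sketch (not part of the commit) of what this config change means when the tokenizer is loaded with transformers. The repo id below is a placeholder, not taken from the commit; the behavioral notes assume the standard LlamaTokenizer defaults (add_bos_token=True, add_eos_token=False).

```python
# Minimal sketch, assuming a placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-repo")

# With "legacy": true, LlamaTokenizer keeps its original SentencePiece
# behavior (the pre-huggingface/transformers#24565 handling of spaces
# around special tokens) instead of the corrected one.
# With "add_bos_token"/"add_eos_token" removed from the config, the class
# defaults apply (add_bos_token=True, add_eos_token=False), so encodings
# now begin with the <s> token.
ids = tok("Hello world").input_ids
assert ids[0] == tok.bos_token_id

# Dropping the absolute "tokenizer_file" path avoids baking a
# machine-specific cache location into the shipped config; the tokenizer
# is rebuilt from the repo's own files instead.
```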