soccerking committed on
Commit
9c1774e
1 Parent(s): 3ad39ab

Upload tokenizer_config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +2 -8
tokenizer_config.json CHANGED
@@ -1,7 +1,6 @@
1
  {
2
  "add_bos_token": true,
3
  "add_eos_token": false,
4
- "add_prefix_space": true,
5
  "added_tokens_decoder": {
6
  "0": {
7
  "content": "<unk>",
@@ -34,16 +33,11 @@
34
  "clean_up_tokenization_spaces": false,
35
  "eos_token": "</s>",
36
  "legacy": true,
37
- "max_length": 2048,
38
  "model_max_length": 1000000000000000019884624838656,
39
- "pad_token": "</s>",
40
  "sp_model_kwargs": {},
41
  "spaces_between_special_tokens": false,
42
- "stride": 0,
43
  "tokenizer_class": "LlamaTokenizer",
44
- "truncation_side": "right",
45
- "truncation_strategy": "longest_first",
46
  "unk_token": "<unk>",
47
- "use_default_system_prompt": false,
48
- "use_fast": true
49
  }
 
1
  {
2
  "add_bos_token": true,
3
  "add_eos_token": false,
 
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
 
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "</s>",
35
  "legacy": true,
 
36
  "model_max_length": 1000000000000000019884624838656,
37
+ "pad_token": null,
38
  "sp_model_kwargs": {},
39
  "spaces_between_special_tokens": false,
 
40
  "tokenizer_class": "LlamaTokenizer",
 
 
41
  "unk_token": "<unk>",
42
+ "use_default_system_prompt": false
 
43
  }