dickdiss committed
Commit 2fbf849
1 Parent(s): cc147e2

Upload tokenizer

special_tokens_map.json CHANGED
@@ -7,11 +7,11 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_text|>",
+    "content": "<|eot_id|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|end_of_text|>"
+  "pad_token": "<|eot_id|>"
 }
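This swaps both the end-of-sequence and padding tokens from <|end_of_text|> to <|eot_id|>, the end-of-turn marker used by Llama 3 style chat formatting. A minimal sketch to verify the change after loading, where "user/model" is a hypothetical placeholder for this repository's id:

from transformers import AutoTokenizer

# Load the tokenizer as of this commit ("user/model" is a placeholder id).
tok = AutoTokenizer.from_pretrained("user/model")
assert tok.eos_token == "<|eot_id|>"
assert tok.pad_token == "<|eot_id|>"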
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 2048,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
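Dropping the baked-in truncation block (right-truncate at 2048 tokens, LongestFirst strategy) in favor of "truncation": null means the tokenizer no longer cuts inputs by default; truncation becomes an explicit per-call choice. A sketch, reusing the tok object loaded above:

# With "truncation": null in tokenizer.json, nothing is dropped unless
# the caller asks for it at encode time.
enc = tok("a very long input ...", truncation=True, max_length=2048)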
tokenizer_config.json CHANGED
@@ -2051,17 +2051,14 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
+  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
-  "max_length": 2048,
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|end_of_text|>",
-  "stride": 0,
-  "tokenizer_class": "PreTrainedTokenizerFast",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first"
+  "pad_token": "<|eot_id|>",
+  "tokenizer_class": "PreTrainedTokenizerFast"
 }
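The new chat_template renders each message as <|start_header_id|>{role}<|end_header_id|>\n\n{content}<|eot_id|>, prepends the BOS token to the first message, and, when add_generation_prompt is set, opens an assistant header for the model to complete. A usage sketch with the tok object from above:

messages = [{"role": "user", "content": "Hello!"}]
# tokenize=False returns the rendered prompt string instead of token ids.
prompt = tok.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# -> <|begin_of_text|><|start_header_id|>user<|end_header_id|>
#
#    Hello!<|eot_id|><|start_header_id|>assistant<|end_header_id|>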