danielhanchen committed on
Commit f59e9e4
1 Parent(s): a504f88

Upload tokenizer

README.md CHANGED
@@ -8,7 +8,6 @@ tags:
 - transformers
 - llama
 - llama-3
-
 ---
 
 # Finetune Mistral, Gemma, Llama 2-5x faster with 70% less memory via Unsloth!
special_tokens_map.json CHANGED
@@ -12,5 +12,6 @@
     "normalized": false,
     "rstrip": false,
     "single_word": false
-  }
+  },
+  "pad_token": "<|end_of_text|>"
 }
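
With `pad_token` now registered in special_tokens_map.json, the tokenizer comes up with a usable padding token out of the box, instead of needing the common `tokenizer.pad_token = tokenizer.eos_token` workaround. A minimal sketch of the effect, using a placeholder repo id (the actual repository this commit belongs to is not named here):

```python
from transformers import AutoTokenizer

# "user/llama-3-model" is a placeholder; substitute the actual repo id.
tokenizer = AutoTokenizer.from_pretrained("user/llama-3-model")

# After this commit the pad token is defined by special_tokens_map.json,
# so batch encoding with padding works without any manual setup.
print(tokenizer.pad_token)  # -> <|end_of_text|>
batch = tokenizer(["short", "a somewhat longer input"], padding=True)
print(batch["input_ids"])
```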
tokenizer.json CHANGED
@@ -2348,7 +2348,6 @@
     "end_of_word_suffix": null,
     "fuse_unk": false,
     "byte_fallback": false,
-    "ignore_merges": false,
     "vocab": {
       "!": 0,
       "\"": 1,
tokenizer_config.json CHANGED
@@ -2050,7 +2050,6 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|end_of_text|>",
   "model_input_names": [
@@ -2058,6 +2057,7 @@
     "attention_mask"
   ],
   "model_max_length": 8192,
-  "padding_side": "right",
+  "pad_token": "<|end_of_text|>",
+  "padding_side": "left",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
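
On the tokenizer.json side, dropping `ignore_merges` is plausibly a compatibility fix: the field was only introduced in recent releases of the `tokenizers` library, and older releases refuse to deserialize a BPE model containing keys they do not recognize. A quick way to check that the file parses, sketched with the standalone `tokenizers` API:

```python
from tokenizers import Tokenizer

# Load the serialized tokenizer directly; on library versions that predate
# the `ignore_merges` field, a file carrying that key fails to deserialize,
# while this revision of the file loads cleanly.
tok = Tokenizer.from_file("tokenizer.json")
print(tok.get_vocab_size())
print(tok.encode("Hello world").ids)
```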
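
The tokenizer_config.json changes mirror the pad-token addition and flip `padding_side` from right to left, the usual setting for batched generation with a decoder-only model: left padding keeps every prompt flush against the position where new tokens are generated, whereas right padding would leave pad tokens between the prompt and its continuation. (The bundled Llama 3 `chat_template` is also removed, so chat prompts must be formatted manually or with a template supplied at call time.) A sketch of batched generation under the new defaults, again with a placeholder repo id:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "user/llama-3-model"  # placeholder; use the actual repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype=torch.bfloat16)

# padding_side="left" now comes from tokenizer_config.json, so prompts of
# unequal length in one batch are padded on the left and generation
# continues directly from each prompt's last real token.
batch = tokenizer(["Hello!", "Explain BPE merges in one sentence."],
                  padding=True, return_tensors="pt")
out = model.generate(**batch, max_new_tokens=32,
                     pad_token_id=tokenizer.pad_token_id)
print(tokenizer.batch_decode(out, skip_special_tokens=True))
```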