firstgradeai committed on
Commit
0a00e0b
1 Parent(s): 2783c45

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -17,7 +17,7 @@
17
  "rstrip": false,
18
  "single_word": false
19
  },
20
- "pad_token": "<unk>",
21
  "unk_token": {
22
  "content": "<unk>",
23
  "lstrip": false,
 
17
  "rstrip": false,
18
  "single_word": false
19
  },
20
+ "pad_token": "</s>",
21
  "unk_token": {
22
  "content": "<unk>",
23
  "lstrip": false,
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
1
  {
2
  "version": "1.0",
3
- "truncation": {
4
- "direction": "Right",
5
- "max_length": 512,
6
- "strategy": "LongestFirst",
7
- "stride": 0
8
- },
9
  "padding": null,
10
  "added_tokens": [
11
  {
 
1
  {
2
  "version": "1.0",
3
+ "truncation": null,
 
 
 
 
 
4
  "padding": null,
5
  "added_tokens": [
6
  {
tokenizer_config.json CHANGED
@@ -60,7 +60,7 @@
60
  "eos_token": "</s>",
61
  "legacy": false,
62
  "model_max_length": 1000000000000000019884624838656,
63
- "pad_token": "<unk>",
64
  "padding_side": "right",
65
  "sp_model_kwargs": {},
66
  "tokenizer_class": "LlamaTokenizer",
 
60
  "eos_token": "</s>",
61
  "legacy": false,
62
  "model_max_length": 1000000000000000019884624838656,
63
+ "pad_token": "</s>",
64
  "padding_side": "right",
65
  "sp_model_kwargs": {},
66
  "tokenizer_class": "LlamaTokenizer",