deepparag committed
Commit f3cdd79
1 Parent(s): 20afa53

add tokenizer

Files changed (2):
  1. special_tokens_map.json +1 -23
  2. tokenizer_config.json +1 -32
special_tokens_map.json CHANGED
@@ -1,23 +1 @@
- {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
- }
+ {"bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}}
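The change to this file is purely one of serialization: the pretty-printed map is collapsed onto a single line, and bos, eos, and unk still all point at <|endoftext|> with identical flags. A minimal check sketch, assuming the file is available locally at the path shown:

    import json

    # Minimal sketch: confirm the compact special_tokens_map.json still maps
    # bos/eos/unk to <|endoftext|> with the same flags. The local path is
    # an assumption.
    with open("special_tokens_map.json") as f:
        special_tokens = json.load(f)

    for name in ("bos_token", "eos_token", "unk_token"):
        token = special_tokens[name]
        assert token["content"] == "<|endoftext|>"
        assert token["normalized"] and not token["single_word"]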
tokenizer_config.json CHANGED
@@ -1,32 +1 @@
- {
-   "add_prefix_space": false,
-   "bos_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "errors": "replace",
-   "model_max_length": 1024,
-   "name_or_path": "deepparag/Aeona-Beta",
-   "special_tokens_map_file": null,
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
- }
+ {"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "transfaeries/DialoGPT-medium-Discord-1.0", "errors": "replace", "tokenizer_class": "GPT2Tokenizer"}
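Functionally the tokenizer is unchanged: it remains a GPT2Tokenizer with a 1024-token limit and <|endoftext|> serving as bos, eos, and unk; only name_or_path now records transfaeries/DialoGPT-medium-Discord-1.0 as the source of the copied tokenizer files. A minimal loading sketch, assuming the transformers library; the repo id below is a placeholder, since the commit does not show which repository it belongs to:

    from transformers import AutoTokenizer

    # Minimal sketch: load the tokenizer described by these two config files.
    # "deepparag/Aeona" is an assumed repo id, not confirmed by the commit.
    tokenizer = AutoTokenizer.from_pretrained("deepparag/Aeona")

    print(tokenizer.eos_token)         # <|endoftext|>, shared with bos and unk
    print(tokenizer.model_max_length)  # 1024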