colerobertson committed on
Commit 3ee3101
1 Parent(s): 0b2b55d

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +0 -16
  2. tokenizer_config.json +37 -2
  3. vocab.json +34 -34
special_tokens_map.json CHANGED
@@ -1,20 +1,4 @@
 {
-  "additional_special_tokens": [
-    {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    }
-  ],
   "bos_token": "<s>",
   "eos_token": "</s>",
   "pad_token": "[PAD]",
tokenizer_config.json CHANGED
@@ -1,11 +1,46 @@
 {
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": false
+    },
+    "36": {
+      "content": "[UNK]",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": false
+    },
+    "37": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "38": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
   "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
   "do_lower_case": false,
   "eos_token": "</s>",
-  "name_or_path": "./",
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
   "replace_word_delimiter_char": " ",
-  "special_tokens_map_file": null,
+  "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
   "unk_token": "[UNK]",
   "word_delimiter_token": "|"
vocab.json CHANGED
@@ -1,39 +1,39 @@
 {
-  "'": 33,
-  "0": 35,
-  "1": 10,
-  "2": 26,
-  "3": 25,
-  "5": 14,
-  "9": 3,
+  "'": 4,
+  "0": 20,
+  "1": 32,
+  "2": 33,
+  "3": 34,
+  "5": 5,
+  "9": 8,
   "[PAD]": 0,
   "[UNK]": 36,
-  "a": 6,
-  "b": 29,
-  "c": 17,
-  "d": 34,
-  "e": 28,
+  "a": 15,
+  "b": 10,
+  "c": 35,
+  "d": 16,
+  "e": 12,
   "f": 2,
-  "g": 16,
-  "h": 12,
-  "i": 11,
-  "j": 27,
-  "k": 1,
-  "l": 21,
-  "m": 23,
-  "n": 18,
-  "o": 5,
-  "p": 24,
-  "q": 9,
-  "r": 22,
-  "s": 13,
-  "t": 4,
-  "u": 19,
-  "v": 20,
-  "w": 31,
-  "x": 32,
-  "y": 15,
-  "z": 8,
-  "|": 7,
-  "£": 30
+  "g": 6,
+  "h": 14,
+  "i": 3,
+  "j": 22,
+  "k": 21,
+  "l": 28,
+  "m": 31,
+  "n": 23,
+  "o": 13,
+  "p": 11,
+  "q": 7,
+  "r": 17,
+  "s": 29,
+  "t": 9,
+  "u": 18,
+  "v": 19,
+  "w": 24,
+  "x": 27,
+  "y": 26,
+  "z": 30,
+  "|": 25,
+  "£": 1
 }
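
Note: every character id is reshuffled while [PAD] stays at 0 and [UNK] at 36, which is what happens when the vocabulary is regenerated from the training text rather than hand-edited. A sketch, not the author's actual script, of how a vocab.json with this shape is commonly built; the corpus below is a placeholder and the real extraction step is not part of this commit:

import json

# [PAD] pinned to id 0, the 35 corpus characters given ids 1-35, and [UNK]
# appended as id 36. Because character ids depend on the order the set is
# traversed, regenerating the file reshuffles letters, digits, ' and £
# while the pinned special tokens stay put.
corpus_text = "example transcripts go here"   # hypothetical training text
chars = set(corpus_text.replace(" ", "|"))    # "|" is the word delimiter

vocab = {"[PAD]": 0}
vocab.update({ch: i for i, ch in enumerate(chars, start=1)})
vocab["[UNK]"] = len(vocab)

with open("vocab.json", "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False, indent=2)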