tuanio committed
Commit faf4d9c
1 Parent(s): 7002f28

Upload tokenizer

Files changed (4)
  1. added_tokens.json +5 -0
  2. special_tokens_map.json +29 -0
  3. tokenizer_config.json +13 -0
  4. vocab.json +147 -0
added_tokens.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "</s>": 146,
+   "<s>": 145,
+   "[PAD]": 147
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": false,
+   "eos_token": "</s>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "replace_word_delimiter_char": " ",
+   "target_lang": null,
+   "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "unk_token": "[UNK]",
+   "word_delimiter_token": "|"
+ }
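
tokenizer_config.json pins the class to Wav2Vec2CTCTokenizer, with "|" as the word delimiter and the special tokens declared above. A minimal usage sketch, assuming a local copy of this repo's files; the "./tokenizer" directory and the sample text "xin chào" are placeholders, not part of this commit:

from transformers import Wav2Vec2CTCTokenizer

# Placeholder path: a local directory holding vocab.json, tokenizer_config.json,
# special_tokens_map.json and added_tokens.json from this commit.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer")

ids = tokenizer("xin chào").input_ids    # character-level ids; spaces become the "|" delimiter token
print(ids)
print(tokenizer.decode(ids))             # should round-trip to "xin chào" (delimiters mapped back to spaces)
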
vocab.json ADDED
@@ -0,0 +1,147 @@
+ {
+   "!": 1,
+   "#": 2,
+   "&": 3,
+   "'": 4,
+   "(": 5,
+   ")": 6,
+   ",": 7,
+   "-": 8,
+   ".": 9,
+   "/": 10,
+   "0": 11,
+   "1": 12,
+   "2": 13,
+   "3": 14,
+   "4": 15,
+   "5": 16,
+   "6": 17,
+   "7": 18,
+   "8": 19,
+   "9": 20,
+   "<": 21,
+   "=": 22,
+   ">": 23,
+   "?": 24,
+   "@": 25,
+   "_": 26,
+   "a": 27,
+   "b": 28,
+   "c": 29,
+   "d": 30,
+   "e": 31,
+   "f": 32,
+   "g": 33,
+   "h": 34,
+   "i": 35,
+   "j": 36,
+   "k": 37,
+   "l": 38,
+   "m": 39,
+   "n": 40,
+   "o": 41,
+   "p": 42,
+   "q": 43,
+   "r": 44,
+   "s": 45,
+   "t": 46,
+   "u": 47,
+   "v": 48,
+   "w": 49,
+   "x": 50,
+   "y": 51,
+   "z": 52,
+   "|": 0,
+   "£": 53,
+   "à": 54,
+   "á": 55,
+   "â": 56,
+   "ã": 57,
+   "è": 58,
+   "é": 59,
+   "ê": 60,
+   "ì": 61,
+   "í": 62,
+   "ð": 63,
+   "ò": 64,
+   "ó": 65,
+   "ô": 66,
+   "õ": 67,
+   "ö": 68,
+   "ù": 69,
+   "ú": 70,
+   "ý": 71,
+   "ă": 72,
+   "ć": 73,
+   "đ": 74,
+   "ġ": 75,
+   "ĩ": 76,
+   "ij": 77,
+   "ũ": 78,
+   "ơ": 79,
+   "ư": 80,
+   "ǎ": 81,
+   "ǡ": 82,
+   "̀": 83,
+   "́": 84,
+   "̃": 85,
+   "̉": 86,
+   "̣": 87,
+   "ۃ": 88,
+   "ۙ": 89,
+   "۟": 90,
+   "ۣ": 91,
+   "ạ": 92,
+   "ả": 93,
+   "ấ": 94,
+   "ầ": 95,
+   "ẩ": 96,
+   "ẫ": 97,
+   "ậ": 98,
+   "ắ": 99,
+   "ằ": 100,
+   "ẳ": 101,
+   "ẵ": 102,
+   "ặ": 103,
+   "ẹ": 104,
+   "ẻ": 105,
+   "ẽ": 106,
+   "ế": 107,
+   "ề": 108,
+   "ể": 109,
+   "ễ": 110,
+   "ệ": 111,
+   "ỉ": 112,
+   "ị": 113,
+   "ọ": 114,
+   "ỏ": 115,
+   "ố": 116,
+   "ồ": 117,
+   "ổ": 118,
+   "ỗ": 119,
+   "ộ": 120,
+   "ớ": 121,
+   "ờ": 122,
+   "ở": 123,
+   "ỡ": 124,
+   "ợ": 125,
+   "ụ": 126,
+   "ủ": 127,
+   "ứ": 128,
+   "ừ": 129,
+   "ử": 130,
+   "ữ": 131,
+   "ự": 132,
+   "ỳ": 133,
+   "ỵ": 134,
+   "ỷ": 135,
+   "ỹ": 136,
+   "‎": 137,
+   "–": 138,
+   "‘": 139,
+   "’": 140,
+   "“": 141,
+   "”": 142,
+   "…": 143,
+   "": 144
+ }
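
The vocabulary is character-level: the word delimiter "|" sits at id 0, Vietnamese letters, digits and punctuation fill ids 1–144, and the three tokens from added_tokens.json take 145–147. A small sanity-check sketch, assuming the two JSON files from this commit are in the working directory:

import json

# Load the vocabulary and the added special tokens from this commit.
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)
with open("added_tokens.json", encoding="utf-8") as f:
    added = json.load(f)

print(len(vocab), len(added))            # 145 base entries + 3 added tokens
ids = sorted(vocab.values()) + sorted(added.values())
assert ids == list(range(148))           # ids 0–147 are contiguous with no gaps or duplicates
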