ALIGHASEMI931 committed
Commit 8a8474f
1 Parent(s): 76edc76

Upload tokenizer

Files changed (4)
  1. added_tokens.json +4 -0
  2. special_tokens_map.json +22 -0
  3. tokenizer_config.json +13 -0
  4. vocab.json +126 -0
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "</s>": 125,
+   "<s>": 124
+ }
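
A minimal sketch (plain json, no transformers needed) of how added_tokens.json relates to vocab.json further down: the base vocabulary covers ids 0–123, and the two added tokens take the next ids. It assumes the two files from this commit sit in the current directory.

```python
# Sketch: added_tokens.json lists tokens appended after the base vocabulary.
# Assumes vocab.json and added_tokens.json from this commit are in the
# current directory.
import json

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)
with open("added_tokens.json", encoding="utf-8") as f:
    added = json.load(f)

print(len(vocab))  # 124 -> base vocab uses ids 0-123
print(added)       # {'</s>': 125, '<s>': 124} -> appended after the base vocab
```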
special_tokens_map.json ADDED
@@ -0,0 +1,22 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": false,
+   "eos_token": "</s>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "replace_word_delimiter_char": " ",
+   "target_lang": null,
+   "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "unk_token": "[UNK]",
+   "word_delimiter_token": "|"
+ }
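
Taken together, these files can be loaded as a single Wav2Vec2CTCTokenizer. A minimal sketch, assuming the four uploaded files live in a local directory; "path/to/tokenizer-dir" is a placeholder (substitute the actual Hub repo id, which is not shown in this commit):

```python
# Sketch of loading the uploaded tokenizer files with transformers.
# "path/to/tokenizer-dir" is a placeholder for a directory holding the
# four files from this commit (or the Hub repo id).
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("path/to/tokenizer-dir")

# Special tokens resolve to the ids defined in vocab.json / added_tokens.json:
print(tokenizer.pad_token, tokenizer.pad_token_id)  # [PAD] 123
print(tokenizer.unk_token, tokenizer.unk_token_id)  # [UNK] 122
print(tokenizer.bos_token, tokenizer.bos_token_id)  # <s> 124
print(tokenizer.eos_token, tokenizer.eos_token_id)  # </s> 125
```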
vocab.json ADDED
@@ -0,0 +1,126 @@
+ {
+   "#": 1,
+   "&": 2,
+   "(": 3,
+   ")": 4,
+   "[PAD]": 123,
+   "[UNK]": 122,
+   "_": 5,
+   "a": 6,
+   "b": 7,
+   "c": 8,
+   "d": 9,
+   "e": 10,
+   "f": 11,
+   "g": 12,
+   "h": 13,
+   "i": 14,
+   "k": 15,
+   "m": 16,
+   "n": 17,
+   "o": 18,
+   "p": 19,
+   "q": 20,
+   "r": 21,
+   "s": 22,
+   "t": 23,
+   "u": 24,
+   "y": 25,
+   "z": 26,
+   "|": 0,
+   "«": 27,
+   "»": 28,
+   "ā": 29,
+   "š": 30,
+   "،": 31,
+   "؛": 32,
+   "؟": 33,
+   "ء": 34,
+   "آ": 35,
+   "أ": 36,
+   "ؤ": 37,
+   "ئ": 38,
+   "ا": 39,
+   "ب": 40,
+   "ة": 41,
+   "ت": 42,
+   "ث": 43,
+   "ج": 44,
+   "ح": 45,
+   "خ": 46,
+   "د": 47,
+   "ذ": 48,
+   "ر": 49,
+   "ز": 50,
+   "س": 51,
+   "ش": 52,
+   "ص": 53,
+   "ض": 54,
+   "ط": 55,
+   "ظ": 56,
+   "ع": 57,
+   "غ": 58,
+   "ـ": 59,
+   "ف": 60,
+   "ق": 61,
+   "ك": 62,
+   "ل": 63,
+   "م": 64,
+   "ن": 65,
+   "ه": 66,
+   "و": 67,
+   "ى": 68,
+   "ي": 69,
+   "ً": 70,
+   "ٌ": 71,
+   "َ": 72,
+   "ُ": 73,
+   "ِ": 74,
+   "ّ": 75,
+   "ْ": 76,
+   "ٔ": 77,
+   "٬": 78,
+   "پ": 79,
+   "چ": 80,
+   "ژ": 81,
+   "ک": 82,
+   "گ": 83,
+   "ۀ": 84,
+   "ی": 85,
+   "ے": 86,
+   "–": 87,
+   "…": 88,
+   "ﭘ": 89,
+   "ﮐ": 90,
+   "ﮔ": 91,
+   "ﯽ": 92,
+   "ﯾ": 93,
+   "ﯿ": 94,
+   "ﺍ": 95,
+   "ﺎ": 96,
+   "ﺑ": 97,
+   "ﺒ": 98,
+   "ﺖ": 99,
+   "ﺘ": 100,
+   "ﺧ": 101,
+   "ﺩ": 102,
+   "ﺪ": 103,
+   "ﺭ": 104,
+   "ﺮ": 105,
+   "ﺱ": 106,
+   "ﺴ": 107,
+   "ﺷ": 108,
+   "ﺸ": 109,
+   "ﻀ": 110,
+   "ﻋ": 111,
+   "ﻌ": 112,
+   "ﻟ": 113,
+   "ﻡ": 114,
+   "ﻢ": 115,
+   "ﻤ": 116,
+   "ﻥ": 117,
+   "ﻧ": 118,
+   "ﻪ": 119,
+   "ﻭ": 120,
+   "ﻮ": 121
+ }
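
As a quick illustration of how this character-level CTC vocabulary behaves (a sketch with a placeholder path, not part of the commit): the tokenizer splits text into single characters, maps spaces to the word_delimiter_token "|" (id 0), and restores the spaces on decode.

```python
# Sketch: character-level encoding/decoding with the vocabulary above.
# "path/to/tokenizer-dir" is a placeholder for the uploaded files.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("path/to/tokenizer-dir")

ids = tokenizer("سلام دوست").input_ids
print(ids)
# Expected from vocab.json: [51, 63, 39, 64, 0, 47, 67, 51, 42]
# (س ل ا م | د و س ت -- the space becomes "|", id 0)

print(tokenizer.decode(ids))  # "سلام دوست" -- "|" is mapped back to a space
```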