marziye-A committed on
Commit 1e56595
1 parent: 2f4c142

Upload tokenizer

Files changed (4)
  1. added_tokens.json +4 -0
  2. special_tokens_map.json +22 -0
  3. tokenizer_config.json +13 -0
  4. vocab.json +130 -0
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "</s>": 129,
+   "<s>": 128
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,22 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": false,
+   "eos_token": "</s>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "replace_word_delimiter_char": " ",
+   "target_lang": null,
+   "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "unk_token": "[UNK]",
+   "word_delimiter_token": "|"
+ }
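
tokenizer_config.json pins the class to Wav2Vec2CTCTokenizer, so the four uploaded files load together through transformers. A minimal loading sketch in Python, assuming the files sit in a local directory; the "./tokenizer_dir" path is a placeholder, since this commit does not show the repository id on the Hub:

from transformers import Wav2Vec2CTCTokenizer

# Placeholder path: point this at the Hub repo id or a local clone that contains
# vocab.json, added_tokens.json, special_tokens_map.json and tokenizer_config.json.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer_dir")

# vocab.json defines IDs 0-127 and added_tokens.json appends "<s>" = 128 and
# "</s>" = 129, so the full vocabulary should come to 130 entries.
print(len(tokenizer))                  # expected: 130
print(tokenizer.word_delimiter_token)  # "|", per tokenizer_config.json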
vocab.json ADDED
@@ -0,0 +1,130 @@
+ {
+   "#": 1,
+   "&": 2,
+   "(": 3,
+   ")": 4,
+   "[PAD]": 127,
+   "[UNK]": 126,
+   "_": 5,
+   "a": 6,
+   "b": 7,
+   "c": 8,
+   "d": 9,
+   "e": 10,
+   "f": 11,
+   "g": 12,
+   "h": 13,
+   "i": 14,
+   "k": 15,
+   "l": 16,
+   "m": 17,
+   "n": 18,
+   "o": 19,
+   "p": 20,
+   "q": 21,
+   "r": 22,
+   "s": 23,
+   "t": 24,
+   "u": 25,
+   "v": 26,
+   "w": 27,
+   "x": 28,
+   "y": 29,
+   "z": 30,
+   "|": 0,
+   "«": 31,
+   "»": 32,
+   "ā": 33,
+   "š": 34,
+   "؛": 35,
+   "؟": 36,
+   "ء": 37,
+   "آ": 38,
+   "أ": 39,
+   "ؤ": 40,
+   "ئ": 41,
+   "ا": 42,
+   "ب": 43,
+   "ة": 44,
+   "ت": 45,
+   "ث": 46,
+   "ج": 47,
+   "ح": 48,
+   "خ": 49,
+   "د": 50,
+   "ذ": 51,
+   "ر": 52,
+   "ز": 53,
+   "س": 54,
+   "ش": 55,
+   "ص": 56,
+   "ض": 57,
+   "ط": 58,
+   "ظ": 59,
+   "ع": 60,
+   "غ": 61,
+   "ـ": 62,
+   "ف": 63,
+   "ق": 64,
+   "ك": 65,
+   "ل": 66,
+   "م": 67,
+   "ن": 68,
+   "ه": 69,
+   "و": 70,
+   "ى": 71,
+   "ي": 72,
+   "ً": 73,
+   "ٌ": 74,
+   "َ": 75,
+   "ُ": 76,
+   "ِ": 77,
+   "ّ": 78,
+   "ْ": 79,
+   "ٔ": 80,
+   "٬": 81,
+   "پ": 82,
+   "چ": 83,
+   "ژ": 84,
+   "ک": 85,
+   "گ": 86,
+   "ۀ": 87,
+   "ی": 88,
+   "ے": 89,
+   "ە": 90,
+   "–": 91,
+   "…": 92,
+   "ﭘ": 93,
+   "ﮐ": 94,
+   "ﮔ": 95,
+   "ﯽ": 96,
+   "ﯾ": 97,
+   "ﯿ": 98,
+   "ﺍ": 99,
+   "ﺎ": 100,
+   "ﺑ": 101,
+   "ﺒ": 102,
+   "ﺖ": 103,
+   "ﺘ": 104,
+   "ﺧ": 105,
+   "ﺩ": 106,
+   "ﺪ": 107,
+   "ﺭ": 108,
+   "ﺮ": 109,
+   "ﺱ": 110,
+   "ﺴ": 111,
+   "ﺷ": 112,
+   "ﺸ": 113,
+   "ﻀ": 114,
+   "ﻋ": 115,
+   "ﻌ": 116,
+   "ﻟ": 117,
+   "ﻡ": 118,
+   "ﻢ": 119,
+   "ﻤ": 120,
+   "ﻥ": 121,
+   "ﻧ": 122,
+   "ﻪ": 123,
+   "ﻭ": 124,
+   "ﻮ": 125
+ }
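
This tokenizer is character-level: each character of a transcription maps to its ID in vocab.json, spaces are replaced by the word delimiter "|" (ID 0), and decoding reverses the mapping. A short usage sketch, reusing the placeholder path from the loading example above; the Persian sample string is illustrative only, and decode() applies CTC-style grouping of repeated tokens by default, which is intended for model output rather than re-encoded text:

from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer_dir")  # placeholder path, as above

encoding = tokenizer("سلام دنیا")  # arbitrary Persian sample text
print(encoding.input_ids)          # one ID per character; the space becomes "|" -> 0

# group_tokens=False disables the CTC-style collapse of repeated tokens,
# so the plain-text string round-trips exactly.
print(tokenizer.decode(encoding.input_ids, group_tokens=False))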