add tokenizer
- special_tokens_map.json +1 -0
- tokenizer_config.json +1 -0
- vocab.json +1 -0
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]"}
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
vocab.json
ADDED
@@ -0,0 +1 @@
+{"ж": 0, "н": 2, "б": 3, "у": 4, "т": 5, "ь": 6, "х": 7, "з": 8, "д": 9, "м": 10, "г": 11, "ч": 12, "и": 13, "п": 14, "к": 15, "ф": 16, "а": 17, "ц": 18, "ё": 19, "е": 20, "ы": 21, "ю": 22, "ш": 23, "в": 24, "щ": 25, "о": 26, "р": 27, "ъ": 28, "с": 29, "э": 30, "й": 31, "я": 32, "л": 33, "|": 1, "[UNK]": 34, "[PAD]": 35}
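
For reference, a minimal sketch of how these three files are typically loaded with the transformers library once the repository is available locally; the directory path "./tokenizer_dir" is a placeholder and not part of this commit:

```python
from transformers import Wav2Vec2CTCTokenizer

# Load the CTC tokenizer from a local directory that contains
# vocab.json, tokenizer_config.json, and special_tokens_map.json.
# "./tokenizer_dir" is a hypothetical path used only for illustration.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer_dir")

# The vocabulary is character-level Russian; spaces map to the
# word delimiter token "|", unknown characters to "[UNK]".
encoding = tokenizer("привет мир")
print(encoding.input_ids)          # per-character token ids
print(tokenizer.decode(encoding.input_ids))  # "привет мир"
```

The same directory can also be passed to the model repository's id on the Hub instead of a local path, since from_pretrained resolves both.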