FarisHijazi committed on
Commit
d36554f
1 Parent(s): 4f7066b

add tokenizer

Browse files
added_tokens.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"<s>": 44, "</s>": 45}
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
vocab.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"؛": 1, "ء": 2, "ؤ": 3, "ئ": 4, "ا": 5, "ب": 6, "ت": 7, "ث": 8, "ج": 9, "ح": 10, "خ": 11, "د": 12, "ذ": 13, "ر": 14, "ز": 15, "س": 16, "ش": 17, "ص": 18, "ض": 19, "ط": 20, "ظ": 21, "ع": 22, "غ": 23, "ف": 24, "ق": 25, "ك": 26, "ل": 27, "م": 28, "ن": 29, "ه": 30, "و": 31, "ي": 32, "ٰ": 33, "چ": 34, "ڨ": 35, "ک": 36, "ھ": 37, "ی": 38, "ۚ": 39, "ﺃ": 40, "ﻻ": 41, "|": 0, "[UNK]": 42, "[PAD]": 43}