add tokenizer

- added_tokens.json +1 -0
- special_tokens_map.json +1 -0
- tokenizer_config.json +1 -0
- vocab.json +1 -0
added_tokens.json
ADDED
@@ -0,0 +1 @@
+{"<s>": 30, "</s>": 31}
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": "/root/.cache/huggingface/transformers/04dca8bcd381f6e74c0145674cde1ec5243971712856ca8ccd7e2aa8efdd7f90.a21d51735cf8667bcd610f057e88548d5d6a381401f6b4501a8bc6c1a9dc8498", "tokenizer_file": null, "name_or_path": "shields/wav2vec2-base-timit-and-five-sec-dementiabank-new", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
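
A minimal loading sketch, not part of the commit: tokenizer_config.json pins tokenizer_class to Wav2Vec2CTCTokenizer and records the special tokens, so AutoTokenizer can reconstruct the tokenizer from these files. The repo id below is taken from name_or_path above; that it matches the repo this commit targets is an assumption.

```python
# Sketch only: load the tokenizer described by tokenizer_config.json.
# Assumes the four files in this commit live in the repo named by name_or_path.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "shields/wav2vec2-base-timit-and-five-sec-dementiabank-new"
)
print(type(tokenizer).__name__)                     # Wav2Vec2CTCTokenizer
print(tokenizer.pad_token, tokenizer.pad_token_id)  # [PAD] 29
print(tokenizer.word_delimiter_token)               # |
```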
vocab.json
ADDED
@@ -0,0 +1 @@
+{"z": 0, "h": 1, "g": 2, "s": 3, "b": 4, "y": 5, "m": 6, "v": 7, "x": 8, "n": 9, "d": 10, "'": 11, "u": 13, "q": 14, "a": 15, "o": 16, "e": 17, "w": 18, "c": 19, "f": 20, "k": 21, "p": 22, "j": 23, "t": 24, "r": 25, "l": 26, "i": 27, "|": 12, "[UNK]": 28, "[PAD]": 29}
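
Taken together, vocab.json defines the 30-entry character vocabulary (ids 0-29, with "|" at id 12 acting as the word delimiter and [UNK]/[PAD] at 28/29), and added_tokens.json layers <s>=30 and </s>=31 on top. A short sketch of the mapping in use, under the same assumptions as above:

```python
# Sketch only, same assumptions as the loading example above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "shields/wav2vec2-base-timit-and-five-sec-dementiabank-new"
)

ids = tokenizer("hello world").input_ids
print(ids)  # one id per character; the space is encoded as "|" (id 12)
print(tokenizer.convert_tokens_to_ids(["<s>", "</s>"]))  # [30, 31]

# CTC-style decoding collapses repeated ids by default ("ll" -> "l"),
# so disable grouping to round-trip the raw string.
print(tokenizer.decode(ids, group_tokens=False))  # hello world
```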