robinhad committed
Commit 70372e7
1 Parent(s): f0b1d25

Upload 5 files

added_tokens.json CHANGED
@@ -1 +1,4 @@
-{"<s>": 36, "</s>": 37}
+{
+  "</s>": 38,
+  "<s>": 37
+}
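
The id shift here is not cosmetic: the base vocab.json below occupies ids 0-36 (with [PAD] at 36), so the old mapping put <s> on top of [PAD]; the new ids 37/38 append cleanly after the base vocabulary. A minimal check of that invariant, sketched on the assumption that the five uploaded files sit in the current directory:

import json

# Load the base vocabulary and the extra tokens (paths are assumptions;
# adjust to wherever the checkpoint files live).
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)
with open("added_tokens.json", encoding="utf-8") as f:
    added = json.load(f)

# Base ids are contiguous 0..36, so added tokens must start at
# len(vocab) = 37 to avoid colliding with [PAD] (id 36).
assert max(vocab.values()) == len(vocab) - 1
assert sorted(added.values()) == [len(vocab), len(vocab) + 1]
print(added)  # {'</s>': 38, '<s>': 37}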
preprocessor_config.json CHANGED
@@ -4,6 +4,7 @@
   "feature_size": 1,
   "padding_side": "right",
   "padding_value": 0.0,
+  "processor_class": "Wav2Vec2Processor",
   "return_attention_mask": true,
   "sampling_rate": 16000
 }
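
The only functional change is the new processor_class hint, which lets AutoProcessor pick the right wrapper class without inspecting the model. A minimal sketch, assuming a transformers version that ships AutoProcessor and a checkpoint loadable from the current directory:

from transformers import AutoProcessor

# AutoProcessor reads "processor_class" from preprocessor_config.json
# and instantiates Wav2Vec2Processor (feature extractor + CTC tokenizer).
processor = AutoProcessor.from_pretrained("./")
print(type(processor).__name__)                   # Wav2Vec2Processor
print(processor.feature_extractor.sampling_rate)  # 16000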
special_tokens_map.json CHANGED
@@ -1 +1,22 @@
-{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
+{
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
+}
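
The map is unchanged in substance; it is pretty-printed, with each special token's normalized/lstrip/rstrip/single_word flags spelled out. One way to confirm the tokenizer picks them up, sketched under the same local-checkout assumption:

from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./")

# These attributes mirror special_tokens_map.json.
print(tokenizer.bos_token, tokenizer.eos_token)  # <s> </s>
print(tokenizer.pad_token, tokenizer.unk_token)  # [PAD] [UNK]
print(tokenizer.additional_special_tokens)       # ['<s>', '</s>']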
tokenizer_config.json CHANGED
@@ -1 +1,14 @@
-{"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
+{
+  "bos_token": "<s>",
+  "do_lower_case": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "name_or_path": "./",
+  "pad_token": "[PAD]",
+  "processor_class": "Wav2Vec2Processor",
+  "replace_word_delimiter_char": " ",
+  "special_tokens_map_file": null,
+  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "unk_token": "[UNK]",
+  "word_delimiter_token": "|"
+}
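
The conspicuous model_max_length is not arbitrary: it is the float rounding of 10**30, the sentinel transformers uses (VERY_LARGE_INTEGER in tokenization_utils_base) to mean "no length limit", which is appropriate for a character-level CTC tokenizer. A one-liner to verify the value:

# int(1e30) goes through float, whose nearest representable value is
# exactly the number stored in tokenizer_config.json.
print(int(1e30))                                     # 1000000000000000019884624838656
print(int(1e30) == 1000000000000000019884624838656)  # True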
vocab.json CHANGED
@@ -1 +1,39 @@
-{"'": 1, "\u0430": 2, "\u0431": 3, "\u0432": 4, "\u0433": 5, "\u0434": 6, "\u0435": 7, "\u0436": 8, "\u0437": 9, "\u0438": 10, "\u0439": 11, "\u043a": 12, "\u043b": 13, "\u043c": 14, "\u043d": 15, "\u043e": 16, "\u043f": 17, "\u0440": 18, "\u0441": 19, "\u0442": 20, "\u0443": 21, "\u0444": 22, "\u0445": 23, "\u0446": 24, "\u0447": 25, "\u0448": 26, "\u0449": 27, "\u044c": 28, "\u044e": 29, "\u044f": 30, "\u0454": 31, "\u0456": 32, "\u0457": 33, "\u0491": 34, "|": 0, "[UNK]": 35, "[PAD]": 36}
+{
+  "'": 1,
+  "[PAD]": 36,
+  "[UNK]": 35,
+  "|": 0,
+  "а": 2,
+  "б": 3,
+  "в": 4,
+  "г": 5,
+  "д": 6,
+  "е": 7,
+  "ж": 8,
+  "з": 9,
+  "и": 10,
+  "й": 11,
+  "к": 12,
+  "л": 13,
+  "м": 14,
+  "н": 15,
+  "о": 16,
+  "п": 17,
+  "р": 18,
+  "с": 19,
+  "т": 20,
+  "у": 21,
+  "ф": 22,
+  "х": 23,
+  "ц": 24,
+  "ч": 25,
+  "ш": 26,
+  "щ": 27,
+  "ь": 28,
+  "ю": 29,
+  "я": 30,
+  "є": 31,
+  "і": 32,
+  "ї": 33,
+  "ґ": 34
+}
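
Unescaping the \uXXXX sequences makes the layout obvious: a character-level vocabulary with the 33 letters of the Ukrainian alphabet plus the apostrophe, | (id 0) as the word delimiter, and [UNK]/[PAD] at 35/36. A round-trip sketch under the same local-checkout assumption:

from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./")

# One id per character; spaces are mapped to the "|" delimiter token.
ids = tokenizer("привіт світ").input_ids
print(ids)

# Note: decode() applies CTC grouping by default (repeated ids collapse),
# so the round trip is exact only for words without doubled letters.
print(tokenizer.decode(ids))  # привіт світ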