ashesicsis1 committed
Commit 185c9b6
1 Parent(s): 856a49c

commit files to HF hub

alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
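For context, alphabet.json is the label set pyctcdecode uses to map CTC logits back to characters; "\u2047" (⁇) stands in for the unknown token and the empty string marks the CTC blank. A minimal sketch of inspecting it, assuming a local clone of this repo:

import json

with open("alphabet.json") as f:
    alphabet = json.load(f)

# 41 labels: space, digits, a-z, the unk placeholder, the CTC blank, <s>, </s>
print(len(alphabet["labels"]))
print(alphabet["is_bpe"])  # False: this is a character-level vocabulary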
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2b15c479141d06a5fab8b50378663dd55aecb5f38a028c9a6777ee52307793c
+ size 3921919482
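The ~3.9 GB 5-gram KenLM binary is stored through Git LFS, so the commit only records the pointer above. After fetching the real file, the pointer's digest can be checked with a short Python sketch (the path is assumed relative to the repo root):

import hashlib

# Stream the file so the ~3.9 GB binary is never held in memory at once
sha = hashlib.sha256()
with open("language_model/5gram.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

# Should print b2b15c479141d06a5fab8b50378663dd55aecb5f38a028c9a6777ee52307793c
print(sha.hexdigest())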
language_model/attrs.json ADDED
@@ -0,0 +1 @@
+ {"alpha": 0.5, "beta": 1.0, "unk_score_offset": -10.0, "score_boundary": true}
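attrs.json holds the shallow-fusion decoding weights: alpha scales the language-model score, beta is the word-insertion bonus, unk_score_offset penalizes out-of-vocabulary words, and score_boundary controls whether the LM scores sentence boundaries. A hedged sketch of building an equivalent decoder directly with pyctcdecode (file paths assumed relative to the repo root):

import json
from pyctcdecode import build_ctcdecoder

with open("alphabet.json") as f:
    labels = json.load(f)["labels"]
with open("language_model/attrs.json") as f:
    attrs = json.load(f)

decoder = build_ctcdecoder(
    labels,
    kenlm_model_path="language_model/5gram.bin",
    alpha=attrs["alpha"],                        # LM weight during beam search
    beta=attrs["beta"],                          # word-insertion bonus
    unk_score_offset=attrs["unk_score_offset"],  # penalty for OOV words
    lm_score_boundary=attrs["score_boundary"],   # score <s>/</s> boundaries
)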
language_model/unigrams.txt ADDED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "ignore_mismatched_sizes": true,
+   "padding_side": "right",
+   "padding_value": 0.0,
+   "processor_class": "Wav2Vec2ProcessorWithLM",
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
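This config pins the audio front end: mono one-dimensional input, 16 kHz sampling, zero-padding on the right, and an attention mask returned over padded frames. A minimal usage sketch, assuming a local clone of this repo and the transformers library (the silent test signal is a placeholder):

import numpy as np
from transformers import Wav2Vec2FeatureExtractor

feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(".")

audio = np.zeros(16000, dtype=np.float32)  # placeholder: 1 s of 16 kHz audio
inputs = feature_extractor(audio, sampling_rate=16000, return_tensors="np")
print(inputs.input_values.shape)  # (1, 16000)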
special_tokens_map.json CHANGED
@@ -1,5 +1,33 @@
  {
    "additional_special_tokens": [
+     {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
+     {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false
+     },
      {
        "content": "<s>",
        "lstrip": false,
tokenizer_config.json CHANGED
@@ -2,11 +2,13 @@
    "bos_token": "<s>",
    "do_lower_case": false,
    "eos_token": "</s>",
-   "name_or_path": "./",
+   "name_or_path": "/content/drive/MyDrive/CAAD/dataset/large_tokenizer-lv60/",
    "pad_token": "[PAD]",
+   "processor_class": "Wav2Vec2ProcessorWithLM",
    "replace_word_delimiter_char": " ",
    "special_tokens_map_file": null,
    "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "tokenizer_file": null,
    "unk_token": "[UNK]",
    "word_delimiter_token": "|"
  }
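With the preprocessor, tokenizer, and language_model/ files above, the processor_class entries let the repo load as a single Wav2Vec2ProcessorWithLM combining the feature extractor, the CTC tokenizer (including the <s>/</s> special tokens added in special_tokens_map.json), and a pyctcdecode beam-search decoder over the 5-gram. A hedged end-to-end sketch, assuming a local clone that also contains a CTC model checkpoint (the silent waveform is a placeholder):

import numpy as np
import torch
from transformers import AutoModelForCTC, Wav2Vec2ProcessorWithLM

processor = Wav2Vec2ProcessorWithLM.from_pretrained(".")
model = AutoModelForCTC.from_pretrained(".")
print(processor.tokenizer.additional_special_tokens)  # the <s>/</s> entries

speech = np.zeros(16000, dtype=np.float32)  # placeholder: 1 s of 16 kHz audio
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# batch_decode runs pyctcdecode beam search with the 5-gram LM applied
print(processor.batch_decode(logits.numpy()).text)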