ihanif committed
Commit 9c01967
1 Parent(s): 4f11ccd

Upload lm-boosted decoder
.gitattributes CHANGED
@@ -32,3 +32,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ 5gram.arpa filter=lfs diff=lfs merge=lfs -text
+ 5gram_correct.arpa filter=lfs diff=lfs merge=lfs -text
5gram.arpa ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:652210d65f65c5820437781e60777775161c5c38152a0ec46f1c04ffb8e14e7f
+ size 343315656
5gram_correct.arpa ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8971263946d477b5f17d03cce43daa3217acee534ad27f655ed4307f61b91f01
+ size 343315675
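Note: the two ARPA files differ by only 19 bytes. The "_correct" suffix most likely marks the standard KenLM fix-up: lmplz writes a begin-of-sentence <s> token but no matching end-of-sentence </s>, which pyctcdecode expects. A minimal sketch of that patch, assuming the corrected file was produced the usual way (the exact script behind this commit is not part of the upload):

    # Sketch (assumption): add the missing </s> unigram to the raw KenLM ARPA.
    with open("5gram.arpa", "r") as read_file, open("5gram_correct.arpa", "w") as write_file:
        has_added_eos = False
        for line in read_file:
            if not has_added_eos and "ngram 1=" in line:
                count = line.strip().split("=")[-1]
                # bump the header's unigram count by one for the new </s> entry
                write_file.write(line.replace(f"{count}", f"{int(count) + 1}"))
            elif not has_added_eos and "<s>" in line:
                write_file.write(line)
                # reuse the <s> log-probability for </s>
                write_file.write(line.replace("<s>", "</s>"))
                has_added_eos = True
            else:
                write_file.write(line)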
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "_", "`", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00a5", "\u00b0", "\u00bd", "\u00be", "\u00e9", "\u00ed", "\u060c", "\u061b", "\u061f", "\u0621", "\u0622", "\u0623", "\u0624", "\u0626", "\u0627", "\u0628", "\u062a", "\u062b", "\u062c", "\u062d", "\u062e", "\u062f", "\u0630", "\u0631", "\u0632", "\u0633", "\u0634", "\u0635", "\u0636", "\u0637", "\u0638", "\u0639", "\u063a", "\u0640", "\u0641", "\u0642", "\u0643", "\u0644", "\u0645", "\u0646", "\u0647", "\u0648", "\u0649", "\u064a", "\u064b", "\u064c", "\u064e", "\u064f", "\u0651", "\u066a", "\u067c", "\u067e", "\u0681", "\u0685", "\u0686", "\u0689", "\u0693", "\u0696", "\u0698", "\u069a", "\u06a9", "\u06ab", "\u06af", "\u06bc", "\u06be", "\u06cc", "\u06cd", "\u06d0", "\u06d2", "\u06d4", "\u06f0", "\u06f1", "\u06f2", "\u06f3", "\u06f4", "\u06f5", "\u06f6", "\u06f7", "\u06f8", "\u06f9", "\u200c", "\u200d", "\u2013", "\u2014", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d58dcc6784862a21600791165b5fdff97888c792e86884b1dde88992b0d1516
+ size 146434282
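5gram.bin is presumably the binary conversion of the corrected ARPA (146 MB versus 343 MB), typically produced with KenLM's build_binary tool; the binary format loads faster and uses less memory. A quick sanity check that the model loads, assuming the kenlm Python bindings are installed:

    import kenlm

    # Sketch: verify the binary LM loads and scores a whitespace-tokenized sentence.
    lm = kenlm.Model("language_model/5gram.bin")
    print(lm.order)  # expected: 5
    print(lm.score("some text", bos=True, eos=True))  # log10 probability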
language_model/attrs.json ADDED
@@ -0,0 +1 @@
+ {"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
The diff for this file is too large to render.
preprocessor_config.json CHANGED
@@ -4,6 +4,7 @@
  "feature_size": 1,
  "padding_side": "right",
  "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
  "return_attention_mask": true,
  "sampling_rate": 16000
  }
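Adding "processor_class" to preprocessor_config.json lets AutoProcessor resolve to the LM-aware class, so downstream users get the boosted decoder without asking for it explicitly:

    from transformers import AutoProcessor

    # With "processor_class": "Wav2Vec2ProcessorWithLM" in preprocessor_config.json,
    # AutoProcessor now returns the LM-boosted processor for this repo.
    processor = AutoProcessor.from_pretrained("ihanif/wav2vec2-xls-r-300m-pashto")
    print(type(processor).__name__)  # Wav2Vec2ProcessorWithLM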
special_tokens_map.json CHANGED
@@ -105,6 +105,20 @@
  "rstrip": false,
  "single_word": false
  },
+ {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
  {
  "content": "</s>",
  "lstrip": false,
tokenizer_config.json CHANGED
@@ -3,8 +3,9 @@
  "do_lower_case": false,
  "eos_token": "</s>",
  "model_max_length": 1000000000000000019884624838656,
- "name_or_path": "./",
+ "name_or_path": "ihanif/wav2vec2-xls-r-300m-pashto",
  "pad_token": "[PAD]",
+ "processor_class": "Wav2Vec2ProcessorWithLM",
  "replace_word_delimiter_char": " ",
  "special_tokens_map_file": null,
  "tokenizer_class": "Wav2Vec2CTCTokenizer",