patrickvonplaten committed
Commit b62cd0a
1 Parent(s): 74a88c9
config.json ADDED
@@ -0,0 +1,69 @@
+ {
+   "activation_dropout": 0.1,
+   "add_adapter": false,
+   "architectures": [
+     "Data2VecForCTC"
+   ],
+   "attention_dropout": 0.1,
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "classifier_dropout": null,
+   "conv_bias": false,
+   "conv_dim": [
+     512,
+     512,
+     512,
+     512,
+     512,
+     512,
+     512
+   ],
+   "conv_kernel": [
+     10,
+     3,
+     3,
+     3,
+     3,
+     2,
+     2
+   ],
+   "conv_pos_kernel_size": 19,
+   "conv_stride": [
+     5,
+     2,
+     2,
+     2,
+     2,
+     2,
+     2
+   ],
+   "do_stable_layer_norm": false,
+   "eos_token_id": 2,
+   "feat_extract_activation": "gelu",
+   "feat_extract_norm": "layer",
+   "feat_proj_dropout": 0.0,
+   "final_dropout": 0.1,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.1,
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-05,
+   "mask_time_prob": 0.05,
+   "max_position_embeddings": 512,
+   "model_type": "data2vec",
+   "num_attention_heads": 12,
+   "num_conv_pos_embedding_groups": 16,
+   "num_conv_pos_embeddings": 5,
+   "num_feat_extract_layers": 7,
+   "num_hidden_layers": 12,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "proj_codevector_dim": 256,
+   "torch_dtype": "float32",
+   "transformers_version": "4.17.0.dev0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 32
+ }
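The `conv_kernel` / `conv_stride` lists above fully determine how the convolutional feature encoder downsamples raw audio before the transformer stack. (Released `transformers` versions expose this architecture as `Data2VecAudioForCTC`; the `Data2VecForCTC` name here reflects the `4.17.0.dev0` build this config was written against.) A minimal sketch in plain Python, assuming the `config.json` above is on disk, applying the standard no-padding 1-D convolution length formula `out = (in - kernel) // stride + 1` layer by layer:

```python
import json

with open("config.json") as f:
    cfg = json.load(f)

def feat_encoder_output_length(num_samples: int) -> int:
    """Frames produced by the conv feature encoder for `num_samples` inputs."""
    length = num_samples
    for kernel, stride in zip(cfg["conv_kernel"], cfg["conv_stride"]):
        # 1-D convolution with no padding: out = (in - kernel) // stride + 1
        length = (length - kernel) // stride + 1
    return length

# The strides multiply to 5 * 2**6 = 320 samples per frame, i.e. ~20 ms
# at 16 kHz, so one second of audio yields 49 frames:
print(feat_encoder_output_length(16000))  # 49
```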
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "padding_side": "right",
+   "padding_value": 0.0,
+   "processor_class": "Wav2Vec2Processor",
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
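The config names `Wav2Vec2FeatureExtractor`, which computes no spectrogram: it zero-mean/unit-variance normalizes the raw waveform (`do_normalize`) and pads batches with `padding_value`. A minimal sketch, assuming `transformers` and `numpy` are installed and using a dummy waveform as a stand-in:

```python
import numpy as np
from transformers import Wav2Vec2FeatureExtractor

# Mirror the values from preprocessor_config.json above.
feature_extractor = Wav2Vec2FeatureExtractor(
    feature_size=1,
    sampling_rate=16000,
    padding_value=0.0,
    do_normalize=True,
    return_attention_mask=True,
)

waveform = np.random.randn(16000).astype(np.float32)  # dummy 1 s clip
inputs = feature_extractor(waveform, sampling_rate=16000, return_tensors="np")
print(inputs["input_values"].shape)  # (1, 16000): normalized, not downsampled
```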
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:218448a15e6698a0051057715835a3fcfccdf4b5111761fa01bf12df8544fd0f
+ size 372845489
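What is checked in here is not the weights themselves but a Git LFS pointer: `key value` lines giving the spec version, the SHA-256 of the actual blob, and its size (372,845,489 bytes, roughly 373 MB); `git lfs pull` fetches the real file. A minimal sketch of a hypothetical pointer parser in plain Python:

```python
def parse_lfs_pointer(path: str) -> dict:
    """Read a Git LFS pointer file into a {key: value} dict."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("pytorch_model.bin")
print(pointer["oid"])   # sha256:218448a1...
print(pointer["size"])  # 372845489 (bytes)
```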
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "<pad>", "do_lower_case": false, "word_delimiter_token": "|", "return_attention_mask": false, "do_normalize": true, "special_tokens_map_file": "/home/patrick/.cache/huggingface/transformers/60230682499b8486f2a3109ba26ac7395fd4eba61426f05432329ccbfac7c190.9d6cd81ef646692fb1c169a880161ea1cb95f49694f220aced9b704b457e51dd", "name_or_path": "facebook/wav2vec2-large-lv60", "tokenizer_class": "Wav2Vec2CTCTokenizer", "processor_class": "Wav2Vec2Processor"}
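These settings (plus `vocab.json` below) are enough to rebuild the tokenizer locally. A minimal sketch, assuming `transformers` is installed and `vocab.json` has been saved alongside; `Wav2Vec2CTCTokenizer` takes the vocab file path and the special-token strings directly:

```python
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    bos_token="<s>",
    eos_token="</s>",
    unk_token="<unk>",
    pad_token="<pad>",
    word_delimiter_token="|",
    do_lower_case=False,
)

# Characters map to ids from vocab.json; spaces become the "|" delimiter,
# e.g. [11, 5, 15, 15, 8, 4, 18, 8, 13, 15, 14] for the line below.
print(tokenizer("HELLO WORLD")["input_ids"])
```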
vocab.json ADDED
@@ -0,0 +1 @@
+ {"<pad>": 0, "<s>": 1, "</s>": 2, "<unk>": 3, "|": 4, "E": 5, "T": 6, "A": 7, "O": 8, "N": 9, "I": 10, "H": 11, "S": 12, "R": 13, "D": 14, "L": 15, "U": 16, "M": 17, "W": 18, "C": 19, "F": 20, "G": 21, "Y": 22, "P": 23, "B": 24, "V": 25, "K": 26, "'": 27, "X": 28, "J": 29, "Q": 30, "Z": 31}
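Under CTC, `<pad>` (id 0) doubles as the blank token and `|` (id 4) is the word delimiter, so greedy decoding reduces to: take the frame-wise argmax ids, collapse consecutive repeats, drop blanks, and turn `|` into spaces. A minimal sketch in plain Python (the id sequence is made up for illustration):

```python
import json

with open("vocab.json") as f:
    vocab = json.load(f)
id_to_token = {i: t for t, i in vocab.items()}
blank_id = vocab["<pad>"]  # serves as the CTC blank

def greedy_ctc_decode(ids: list[int]) -> str:
    tokens = []
    prev = None
    for i in ids:
        if i != prev and i != blank_id:  # collapse repeats, skip blanks
            tokens.append(id_to_token[i])
        prev = i
    return "".join(tokens).replace("|", " ")

# Hypothetical per-frame argmax ids a model might emit for "HI":
print(greedy_ctc_decode([0, 11, 11, 0, 10, 0]))  # "HI"
```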