Akashpb13 committed
Commit
02bd78a
1 Parent(s): 99ded6a

second commit

Files changed (3)
  1. config.json +4 -4
  2. pytorch_model.bin +1 -1
  3. vocab.json +1 -1
config.json CHANGED
@@ -5,7 +5,7 @@
  "architectures": [
  "Wav2Vec2ForCTC"
  ],
- "attention_dropout": 0.1,
+ "attention_dropout": 0.05,
  "bos_token_id": 1,
  "conv_bias": true,
  "conv_dim": [
@@ -42,16 +42,16 @@
  "feat_extract_activation": "gelu",
  "feat_extract_dropout": 0.0,
  "feat_extract_norm": "layer",
- "feat_proj_dropout": 0.0,
+ "feat_proj_dropout": 0.05,
  "final_dropout": 0.0,
  "gradient_checkpointing": true,
  "hidden_act": "gelu",
- "hidden_dropout": 0.1,
+ "hidden_dropout": 0.05,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-05,
- "layerdrop": 0.1,
+ "layerdrop": 0.06,
  "mask_channel_length": 10,
  "mask_channel_min_space": 1,
  "mask_channel_other": 0.0,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e30f3b973464aa9a51c0bddefc7aeb8623e4a181582594a1fbfdeb67b1e0dc8f
+ oid sha256:096d445ca653902f1375bc85830c0971c5a049bfa5c1adb57fa585f4e09a359b
  size 1262114199
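
pytorch_model.bin is stored with Git LFS, so the diff only touches the pointer file: the sha256 oid changes while the size stays 1262114199 bytes. A minimal sketch, using plain hashlib and a hypothetical local file name, of checking a downloaded checkpoint against the new pointer:

import hashlib

# sha256 oid from the updated LFS pointer above.
EXPECTED = "096d445ca653902f1375bc85830c0971c5a049bfa5c1adb57fa585f4e09a359b"

sha = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:            # hypothetical local path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED, "checkpoint does not match the LFS pointer"
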
vocab.json CHANGED
@@ -1 +1 @@
- {"à": 0, "á": 2, "ċ": 3, "é": 4, "d": 5, "r": 6, "h": 7, "p": 8, "ì": 9, "ò": 10, "f": 11, "o": 12, "b": 13, "s": 14, "u": 15, "z": 16, "a": 17, "n": 18, "x": 19, "c": 20, "ù": 21, "m": 22, "t": 23, "ć": 24, "i": 25, "": 26, "'": 27, "v": 28, "l": 29, "ħ": 30, "q": 31, "w": 32, "j": 33, "è": 34, "`": 35, "g": 36, "k": 37, "e": 38, "ż": 39, "ġ": 40, "y": 41, "|": 1, "[UNK]": 42, "[PAD]": 43}
+ {"j": 0, "i": 1, "q": 2, "h": 3, "p": 4, "à": 5, "s": 6, "ò": 7, "è": 8, "g": 9, "e": 10, "m": 11, "u": 12, "k": 13, "x": 14, "á": 15, "w": 16, "é": 17, "ċ": 18, "t": 19, "ì": 20, "v": 21, "'": 22, "z": 23, "o": 24, "y": 25, "l": 26, "ù": 27, "": 28, "n": 29, "ż": 30, "ġ": 31, "b": 33, "r": 34, "d": 35, "c": 36, "ħ": 37, "f": 38, "ć": 39, "`": 40, "a": 41, "|": 32, "[UNK]": 42, "[PAD]": 43}