bakrianoo committed
Commit 590c4c5
1 parent: f7b7fff

reset repo files

config.json DELETED
@@ -1,76 +0,0 @@
- {
-   "_name_or_path": "facebook/wav2vec2-large-xlsr-53",
-   "activation_dropout": 0.0,
-   "apply_spec_augment": true,
-   "architectures": [
-     "Wav2Vec2ForCTC"
-   ],
-   "attention_dropout": 0.15,
-   "bos_token_id": 1,
-   "conv_bias": true,
-   "conv_dim": [
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512
-   ],
-   "conv_kernel": [
-     10,
-     3,
-     3,
-     3,
-     3,
-     2,
-     2
-   ],
-   "conv_stride": [
-     5,
-     2,
-     2,
-     2,
-     2,
-     2,
-     2
-   ],
-   "ctc_loss_reduction": "mean",
-   "ctc_zero_infinity": false,
-   "do_stable_layer_norm": true,
-   "eos_token_id": 2,
-   "feat_extract_activation": "gelu",
-   "feat_extract_dropout": 0.0,
-   "feat_extract_norm": "layer",
-   "feat_proj_dropout": 0.0,
-   "final_dropout": 0.0,
-   "gradient_checkpointing": true,
-   "hidden_act": "gelu",
-   "hidden_dropout": 0.15,
-   "hidden_size": 1024,
-   "initializer_range": 0.02,
-   "intermediate_size": 4096,
-   "layer_norm_eps": 1e-05,
-   "layerdrop": 0.1,
-   "mask_channel_length": 10,
-   "mask_channel_min_space": 1,
-   "mask_channel_other": 0.0,
-   "mask_channel_prob": 0.0,
-   "mask_channel_selection": "static",
-   "mask_feature_length": 10,
-   "mask_feature_prob": 0.0,
-   "mask_time_length": 10,
-   "mask_time_min_space": 1,
-   "mask_time_other": 0.0,
-   "mask_time_prob": 0.05,
-   "mask_time_selection": "static",
-   "model_type": "wav2vec2",
-   "num_attention_heads": 16,
-   "num_conv_pos_embedding_groups": 16,
-   "num_conv_pos_embeddings": 128,
-   "num_feat_extract_layers": 7,
-   "num_hidden_layers": 24,
-   "pad_token_id": 45,
-   "transformers_version": "4.4.0",
-   "vocab_size": 48
- }
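For reference, a checkpoint carrying a config like the one deleted above would normally be loaded through `transformers`. A minimal sketch, assuming the files were still present at a hypothetical local path `./checkpoint-dir`:

```python
# Minimal sketch (hypothetical path): load a Wav2Vec2 CTC checkpoint whose
# config.json matches the file deleted above.
from transformers import Wav2Vec2ForCTC

model = Wav2Vec2ForCTC.from_pretrained("./checkpoint-dir")
print(model.config.hidden_size)  # 1024 (wav2vec2-large-xlsr-53)
print(model.config.vocab_size)   # 48, matching the deleted vocab.json
```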
flax_model.msgpack DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8c1cbb240f87dcf909a4be8ec3d21df4d56131489b0aaf964132289f77251c3c
- size 1261967072
optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:ff523d662d9db49ccaadba49574509f044262f42534b15281436d469f3d2a65e
- size 2490464146
preprocessor_config.json DELETED
@@ -1,8 +0,0 @@
- {
-   "do_normalize": true,
-   "feature_size": 1,
-   "padding_side": "right",
-   "padding_value": 0.0,
-   "return_attention_mask": true,
-   "sampling_rate": 16000
- }
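The deleted preprocessor_config.json maps directly onto a `Wav2Vec2FeatureExtractor`. A minimal sketch reconstructing it from the values shown above:

```python
# Minimal sketch: a feature extractor equivalent to the deleted
# preprocessor_config.json (values copied from the diff above).
from transformers import Wav2Vec2FeatureExtractor

feature_extractor = Wav2Vec2FeatureExtractor(
    feature_size=1,
    sampling_rate=16000,
    padding_value=0.0,
    do_normalize=True,
    return_attention_mask=True,
)
```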
pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fe340e53abd873e326771a32ab2b1d21f4ae07a98401dd0ada85940dbacc6e92
- size 1262126414
scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7db558e13c832cb0475ee05ac56dc0ed99b68cb98e8687b128d7b06e57d53360
- size 623
special_tokens_map.json DELETED
@@ -1 +0,0 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]"}
tokenizer_config.json DELETED
@@ -1 +0,0 @@
- {"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|"}
trainer_state.json DELETED
@@ -1,100 +0,0 @@
- {
-   "best_metric": null,
-   "best_model_checkpoint": null,
-   "epoch": 21.238938053097346,
-   "global_step": 2400,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {
-       "epoch": 3.54,
-       "learning_rate": 0.0001926605504587156,
-       "loss": NaN,
-       "step": 400
-     },
-     {
-       "epoch": 3.54,
-       "eval_loss": 2.914095878601074,
-       "eval_runtime": 779.3384,
-       "eval_samples_per_second": 9.78,
-       "eval_wer": 1.0,
-       "step": 400
-     },
-     {
-       "epoch": 7.08,
-       "learning_rate": 0.0001779816513761468,
-       "loss": NaN,
-       "step": 800
-     },
-     {
-       "epoch": 7.08,
-       "eval_loss": 0.5257614850997925,
-       "eval_runtime": 808.0741,
-       "eval_samples_per_second": 9.432,
-       "eval_wer": 0.4909435120753172,
-       "step": 800
-     },
-     {
-       "epoch": 10.62,
-       "learning_rate": 0.00016330275229357798,
-       "loss": NaN,
-       "step": 1200
-     },
-     {
-       "epoch": 10.62,
-       "eval_loss": 0.4604354500770569,
-       "eval_runtime": 816.9678,
-       "eval_samples_per_second": 9.33,
-       "eval_wer": 0.444765656979124,
-       "step": 1200
-     },
-     {
-       "epoch": 14.16,
-       "learning_rate": 0.00014862385321100919,
-       "loss": NaN,
-       "step": 1600
-     },
-     {
-       "epoch": 14.16,
-       "eval_loss": 0.4556906819343567,
-       "eval_runtime": 788.8971,
-       "eval_samples_per_second": 9.662,
-       "eval_wer": 0.4178008595988539,
-       "step": 1600
-     },
-     {
-       "epoch": 17.7,
-       "learning_rate": 0.00013394495412844036,
-       "loss": NaN,
-       "step": 2000
-     },
-     {
-       "epoch": 17.7,
-       "eval_loss": 0.44174715876579285,
-       "eval_runtime": 788.0029,
-       "eval_samples_per_second": 9.673,
-       "eval_wer": 0.4084885386819484,
-       "step": 2000
-     },
-     {
-       "epoch": 21.24,
-       "learning_rate": 0.00011926605504587157,
-       "loss": NaN,
-       "step": 2400
-     },
-     {
-       "epoch": 21.24,
-       "eval_loss": 0.43745675683021545,
-       "eval_runtime": 797.2999,
-       "eval_samples_per_second": 9.56,
-       "eval_wer": 0.4028602128530495,
-       "step": 2400
-     }
-   ],
-   "max_steps": 5650,
-   "num_train_epochs": 50,
-   "total_flos": 8.638551360908018e+19,
-   "trial_name": null,
-   "trial_params": null
- }
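The log above shows eval WER dropping from 1.0 to roughly 0.40 over 2400 steps while the running training loss is logged as NaN. A minimal sketch for pulling the WER curve out of a trainer_state.json shaped like this one (hypothetical local path):

```python
# Minimal sketch (hypothetical path): print the eval WER curve from a
# trainer_state.json like the file deleted above.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_wer" in entry:
        print(f"step {entry['step']:>5}: eval_wer = {entry['eval_wer']:.4f}")
```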
training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b674eec74b835dba17b22c39ba4f76f198e2fd26941f1588e8a63aa2c526e6f9
- size 2287
vocab.json DELETED
@@ -1 +0,0 @@
- {"ج": 0, "ح": 1, "ﺃ": 2, "ت": 3, "ط": 4, "خ": 5, "چ": 6, "س": 7, "ب": 8, "غ": 10, "ث": 11, "ض": 12, "ا": 13, "ذ": 14, "ھ": 15, "ز": 16, "ى": 17, "ﻻ": 18, "ظ": 19, "ق": 20, "ص": 21, "م": 22, "ف": 23, "د": 24, "ش": 25, "و": 26, "ه": 27, "ی": 28, "ء": 29, "ر": 30, "آ": 31, "ع": 32, "ي": 33, "ل": 34, "ؤ": 35, "ڨ": 36, "ک": 37, "إ": 38, "أ": 39, "ك": 40, "ة": 41, "ئ": 42, "ن": 43, "|": 9, "[UNK]": 44, "[PAD]": 45}
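Together with the deleted special_tokens_map.json and tokenizer_config.json, this vocabulary is enough to rebuild the CTC tokenizer. A minimal sketch, assuming the mapping above were saved back to a local vocab.json:

```python
# Minimal sketch (hypothetical local file): rebuild the tokenizer from the
# deleted vocab.json using the special tokens shown in the diffs above.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    bos_token="<s>",
    eos_token="</s>",
    word_delimiter_token="|",
    do_lower_case=False,
)
```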