shReYas0363 committed on
Commit
5727c4f
1 Parent(s): 5c79844

Training in progress, step 2001

Browse files
config.json CHANGED
@@ -7,7 +7,10 @@
7
  "WhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
- "begin_suppress_tokens": null,
 
 
 
11
  "bos_token_id": 50257,
12
  "classifier_proj_size": 256,
13
  "d_model": 384,
@@ -44,7 +47,7 @@
44
  "mask_time_length": 10,
45
  "mask_time_min_masks": 2,
46
  "mask_time_prob": 0.05,
47
- "max_length": null,
48
  "max_source_positions": 1500,
49
  "max_target_positions": 448,
50
  "median_filter_width": 7,
@@ -53,8 +56,98 @@
53
  "num_mel_bins": 80,
54
  "pad_token_id": 50257,
55
  "scale_embedding": false,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  "torch_dtype": "float32",
57
- "transformers_version": "4.45.2",
58
  "use_cache": true,
59
  "use_weighted_layer_sum": false,
60
  "vocab_size": 51865
 
7
  "WhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
+ "begin_suppress_tokens": [
11
+ 220,
12
+ 50257
13
+ ],
14
  "bos_token_id": 50257,
15
  "classifier_proj_size": 256,
16
  "d_model": 384,
 
47
  "mask_time_length": 10,
48
  "mask_time_min_masks": 2,
49
  "mask_time_prob": 0.05,
50
+ "max_length": 448,
51
  "max_source_positions": 1500,
52
  "max_target_positions": 448,
53
  "median_filter_width": 7,
 
56
  "num_mel_bins": 80,
57
  "pad_token_id": 50257,
58
  "scale_embedding": false,
59
+ "suppress_tokens": [
60
+ 1,
61
+ 2,
62
+ 7,
63
+ 8,
64
+ 9,
65
+ 10,
66
+ 14,
67
+ 25,
68
+ 26,
69
+ 27,
70
+ 28,
71
+ 29,
72
+ 31,
73
+ 58,
74
+ 59,
75
+ 60,
76
+ 61,
77
+ 62,
78
+ 63,
79
+ 90,
80
+ 91,
81
+ 92,
82
+ 93,
83
+ 359,
84
+ 503,
85
+ 522,
86
+ 542,
87
+ 873,
88
+ 893,
89
+ 902,
90
+ 918,
91
+ 922,
92
+ 931,
93
+ 1350,
94
+ 1853,
95
+ 1982,
96
+ 2460,
97
+ 2627,
98
+ 3246,
99
+ 3253,
100
+ 3268,
101
+ 3536,
102
+ 3846,
103
+ 3961,
104
+ 4183,
105
+ 4667,
106
+ 6585,
107
+ 6647,
108
+ 7273,
109
+ 9061,
110
+ 9383,
111
+ 10428,
112
+ 10929,
113
+ 11938,
114
+ 12033,
115
+ 12331,
116
+ 12562,
117
+ 13793,
118
+ 14157,
119
+ 14635,
120
+ 15265,
121
+ 15618,
122
+ 16553,
123
+ 16604,
124
+ 18362,
125
+ 18956,
126
+ 20075,
127
+ 21675,
128
+ 22520,
129
+ 26130,
130
+ 26161,
131
+ 26435,
132
+ 28279,
133
+ 29464,
134
+ 31650,
135
+ 32302,
136
+ 32470,
137
+ 36865,
138
+ 42863,
139
+ 47425,
140
+ 49870,
141
+ 50254,
142
+ 50258,
143
+ 50358,
144
+ 50359,
145
+ 50360,
146
+ 50361,
147
+ 50362
148
+ ],
149
  "torch_dtype": "float32",
150
+ "transformers_version": "4.43.3",
151
  "use_cache": true,
152
  "use_weighted_layer_sum": false,
153
  "vocab_size": 51865
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f8f70255a8b639986974ef4a69d430bdc3114eba9c7e5fcdcec24b7437399f78
3
  size 151061672
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:532fe7e5d11661cf5619aa16401c176c32c517e3f932a8328f1d2486a616f8bc
3
  size 151061672
runs/Oct26_09-53-02_DESKTOP-S5FGE5B/events.out.tfevents.1729916586.DESKTOP-S5FGE5B.7128.0 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:74f62b608a65607e8ef9fc0da7e0c9b4170688bd0fee5d088284c3593bba3212
3
- size 15484
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:24302ce9ecc32b13df4225870e43e86b15ac4c1970a68aa3aa1fb8142312c687
3
+ size 24596
runs/Oct26_12-41-59_DESKTOP-S5FGE5B/events.out.tfevents.1729926751.DESKTOP-S5FGE5B.25476.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:da0256cd887f4e0dc4a472d5f77b1309462fdb60466fe9c625fa496f31b9fffd
3
+ size 7071
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0ebf3046ccdc78c47b5ca4bc0434dc828df3f4e024545af19071423c1d9837e2
3
  size 5368
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:66ed2e6d35801cd84713b1e25d35d281d9b150f33dd101c0cada11ca4979b345
3
  size 5368