carlosdanielhernandezmena commited on
Commit
2961748
1 Parent(s): e4e1cad

Uploading the 14 files of the model.

Browse files
added_tokens.json ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "<|af|>": 50327,
3
+ "<|am|>": 50334,
4
+ "<|ar|>": 50272,
5
+ "<|as|>": 50350,
6
+ "<|az|>": 50304,
7
+ "<|ba|>": 50355,
8
+ "<|be|>": 50330,
9
+ "<|bg|>": 50292,
10
+ "<|bn|>": 50302,
11
+ "<|bo|>": 50347,
12
+ "<|br|>": 50309,
13
+ "<|bs|>": 50315,
14
+ "<|ca|>": 50270,
15
+ "<|cs|>": 50283,
16
+ "<|cy|>": 50297,
17
+ "<|da|>": 50285,
18
+ "<|de|>": 50261,
19
+ "<|el|>": 50281,
20
+ "<|en|>": 50259,
21
+ "<|es|>": 50262,
22
+ "<|et|>": 50307,
23
+ "<|eu|>": 50310,
24
+ "<|fa|>": 50300,
25
+ "<|fi|>": 50277,
26
+ "<|fo|>": 50338,
27
+ "<|fr|>": 50265,
28
+ "<|gl|>": 50319,
29
+ "<|gu|>": 50333,
30
+ "<|haw|>": 50352,
31
+ "<|ha|>": 50354,
32
+ "<|he|>": 50279,
33
+ "<|hi|>": 50276,
34
+ "<|hr|>": 50291,
35
+ "<|ht|>": 50339,
36
+ "<|hu|>": 50286,
37
+ "<|hy|>": 50312,
38
+ "<|id|>": 50275,
39
+ "<|is|>": 50311,
40
+ "<|it|>": 50274,
41
+ "<|ja|>": 50266,
42
+ "<|jw|>": 50356,
43
+ "<|ka|>": 50329,
44
+ "<|kk|>": 50316,
45
+ "<|km|>": 50323,
46
+ "<|kn|>": 50306,
47
+ "<|ko|>": 50264,
48
+ "<|la|>": 50294,
49
+ "<|lb|>": 50345,
50
+ "<|ln|>": 50353,
51
+ "<|lo|>": 50336,
52
+ "<|lt|>": 50293,
53
+ "<|lv|>": 50301,
54
+ "<|mg|>": 50349,
55
+ "<|mi|>": 50295,
56
+ "<|mk|>": 50308,
57
+ "<|ml|>": 50296,
58
+ "<|mn|>": 50314,
59
+ "<|mr|>": 50320,
60
+ "<|ms|>": 50282,
61
+ "<|mt|>": 50343,
62
+ "<|my|>": 50346,
63
+ "<|ne|>": 50313,
64
+ "<|nl|>": 50271,
65
+ "<|nn|>": 50342,
66
+ "<|nocaptions|>": 50362,
67
+ "<|notimestamps|>": 50363,
68
+ "<|no|>": 50288,
69
+ "<|oc|>": 50328,
70
+ "<|pa|>": 50321,
71
+ "<|pl|>": 50269,
72
+ "<|ps|>": 50340,
73
+ "<|pt|>": 50267,
74
+ "<|ro|>": 50284,
75
+ "<|ru|>": 50263,
76
+ "<|sa|>": 50344,
77
+ "<|sd|>": 50332,
78
+ "<|si|>": 50322,
79
+ "<|sk|>": 50298,
80
+ "<|sl|>": 50305,
81
+ "<|sn|>": 50324,
82
+ "<|so|>": 50326,
83
+ "<|sq|>": 50317,
84
+ "<|sr|>": 50303,
85
+ "<|startoflm|>": 50360,
86
+ "<|startofprev|>": 50361,
87
+ "<|startoftranscript|>": 50258,
88
+ "<|su|>": 50357,
89
+ "<|sv|>": 50273,
90
+ "<|sw|>": 50318,
91
+ "<|ta|>": 50287,
92
+ "<|te|>": 50299,
93
+ "<|tg|>": 50331,
94
+ "<|th|>": 50289,
95
+ "<|tk|>": 50341,
96
+ "<|tl|>": 50348,
97
+ "<|transcribe|>": 50359,
98
+ "<|translate|>": 50358,
99
+ "<|tr|>": 50268,
100
+ "<|tt|>": 50351,
101
+ "<|uk|>": 50280,
102
+ "<|ur|>": 50290,
103
+ "<|uz|>": 50337,
104
+ "<|vi|>": 50278,
105
+ "<|yi|>": 50335,
106
+ "<|yo|>": 50325,
107
+ "<|zh|>": 50260
108
+ }
all_results.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 1.0,
3
+ "train_loss": 0.051935346220949447,
4
+ "train_runtime": 527672.5015,
5
+ "train_samples_per_second": 1.899,
6
+ "train_steps_per_second": 0.119
7
+ }
config.json ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "openai/whisper-large",
3
+ "activation_dropout": 0.0,
4
+ "activation_function": "gelu",
5
+ "architectures": [
6
+ "WhisperForConditionalGeneration"
7
+ ],
8
+ "attention_dropout": 0.0,
9
+ "begin_suppress_tokens": [
10
+ 220,
11
+ 50257
12
+ ],
13
+ "bos_token_id": 50257,
14
+ "d_model": 1280,
15
+ "decoder_attention_heads": 20,
16
+ "decoder_ffn_dim": 5120,
17
+ "decoder_layerdrop": 0.0,
18
+ "decoder_layers": 32,
19
+ "decoder_start_token_id": 50258,
20
+ "dropout": 0.0,
21
+ "encoder_attention_heads": 20,
22
+ "encoder_ffn_dim": 5120,
23
+ "encoder_layerdrop": 0.0,
24
+ "encoder_layers": 32,
25
+ "eos_token_id": 50257,
26
+ "forced_decoder_ids": null,
27
+ "init_std": 0.02,
28
+ "is_encoder_decoder": true,
29
+ "max_length": 448,
30
+ "max_source_positions": 1500,
31
+ "max_target_positions": 448,
32
+ "model_type": "whisper",
33
+ "num_hidden_layers": 32,
34
+ "num_mel_bins": 80,
35
+ "pad_token_id": 50257,
36
+ "scale_embedding": false,
37
+ "suppress_tokens": [],
38
+ "torch_dtype": "float32",
39
+ "transformers_version": "4.27.0.dev0",
40
+ "use_cache": false,
41
+ "vocab_size": 51865
42
+ }
generation_config.json ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "begin_suppress_tokens": [
3
+ 220,
4
+ 50257
5
+ ],
6
+ "bos_token_id": 50257,
7
+ "decoder_start_token_id": 50258,
8
+ "eos_token_id": 50257,
9
+ "forced_decoder_ids": [
10
+ [
11
+ 1,
12
+ null
13
+ ],
14
+ [
15
+ 2,
16
+ 50359
17
+ ],
18
+ [
19
+ 3,
20
+ 50363
21
+ ]
22
+ ],
23
+ "is_multilingual": true,
24
+ "lang_to_id": {
25
+ "<|af|>": 50327,
26
+ "<|am|>": 50334,
27
+ "<|ar|>": 50272,
28
+ "<|as|>": 50350,
29
+ "<|az|>": 50304,
30
+ "<|ba|>": 50355,
31
+ "<|be|>": 50330,
32
+ "<|bg|>": 50292,
33
+ "<|bn|>": 50302,
34
+ "<|bo|>": 50347,
35
+ "<|br|>": 50309,
36
+ "<|bs|>": 50315,
37
+ "<|ca|>": 50270,
38
+ "<|cs|>": 50283,
39
+ "<|cy|>": 50297,
40
+ "<|da|>": 50285,
41
+ "<|de|>": 50261,
42
+ "<|el|>": 50281,
43
+ "<|en|>": 50259,
44
+ "<|es|>": 50262,
45
+ "<|et|>": 50307,
46
+ "<|eu|>": 50310,
47
+ "<|fa|>": 50300,
48
+ "<|fi|>": 50277,
49
+ "<|fo|>": 50338,
50
+ "<|fr|>": 50265,
51
+ "<|gl|>": 50319,
52
+ "<|gu|>": 50333,
53
+ "<|haw|>": 50352,
54
+ "<|ha|>": 50354,
55
+ "<|he|>": 50279,
56
+ "<|hi|>": 50276,
57
+ "<|hr|>": 50291,
58
+ "<|ht|>": 50339,
59
+ "<|hu|>": 50286,
60
+ "<|hy|>": 50312,
61
+ "<|id|>": 50275,
62
+ "<|is|>": 50311,
63
+ "<|it|>": 50274,
64
+ "<|ja|>": 50266,
65
+ "<|jw|>": 50356,
66
+ "<|ka|>": 50329,
67
+ "<|kk|>": 50316,
68
+ "<|km|>": 50323,
69
+ "<|kn|>": 50306,
70
+ "<|ko|>": 50264,
71
+ "<|la|>": 50294,
72
+ "<|lb|>": 50345,
73
+ "<|ln|>": 50353,
74
+ "<|lo|>": 50336,
75
+ "<|lt|>": 50293,
76
+ "<|lv|>": 50301,
77
+ "<|mg|>": 50349,
78
+ "<|mi|>": 50295,
79
+ "<|mk|>": 50308,
80
+ "<|ml|>": 50296,
81
+ "<|mn|>": 50314,
82
+ "<|mr|>": 50320,
83
+ "<|ms|>": 50282,
84
+ "<|mt|>": 50343,
85
+ "<|my|>": 50346,
86
+ "<|ne|>": 50313,
87
+ "<|nl|>": 50271,
88
+ "<|nn|>": 50342,
89
+ "<|no|>": 50288,
90
+ "<|oc|>": 50328,
91
+ "<|pa|>": 50321,
92
+ "<|pl|>": 50269,
93
+ "<|ps|>": 50340,
94
+ "<|pt|>": 50267,
95
+ "<|ro|>": 50284,
96
+ "<|ru|>": 50263,
97
+ "<|sa|>": 50344,
98
+ "<|sd|>": 50332,
99
+ "<|si|>": 50322,
100
+ "<|sk|>": 50298,
101
+ "<|sl|>": 50305,
102
+ "<|sn|>": 50324,
103
+ "<|so|>": 50326,
104
+ "<|sq|>": 50317,
105
+ "<|sr|>": 50303,
106
+ "<|su|>": 50357,
107
+ "<|sv|>": 50273,
108
+ "<|sw|>": 50318,
109
+ "<|ta|>": 50287,
110
+ "<|te|>": 50299,
111
+ "<|tg|>": 50331,
112
+ "<|th|>": 50289,
113
+ "<|tk|>": 50341,
114
+ "<|tl|>": 50348,
115
+ "<|tr|>": 50268,
116
+ "<|tt|>": 50351,
117
+ "<|uk|>": 50280,
118
+ "<|ur|>": 50290,
119
+ "<|uz|>": 50337,
120
+ "<|vi|>": 50278,
121
+ "<|yi|>": 50335,
122
+ "<|yo|>": 50325,
123
+ "<|zh|>": 50260
124
+ },
125
+ "max_initial_timestamp_index": 1,
126
+ "max_length": 448,
127
+ "no_timestamps_token_id": 50363,
128
+ "pad_token_id": 50257,
129
+ "return_timestamps": false,
130
+ "suppress_tokens": [
131
+ 1,
132
+ 2,
133
+ 7,
134
+ 8,
135
+ 9,
136
+ 10,
137
+ 14,
138
+ 25,
139
+ 26,
140
+ 27,
141
+ 28,
142
+ 29,
143
+ 31,
144
+ 58,
145
+ 59,
146
+ 60,
147
+ 61,
148
+ 62,
149
+ 63,
150
+ 90,
151
+ 91,
152
+ 92,
153
+ 93,
154
+ 359,
155
+ 503,
156
+ 522,
157
+ 542,
158
+ 873,
159
+ 893,
160
+ 902,
161
+ 918,
162
+ 922,
163
+ 931,
164
+ 1350,
165
+ 1853,
166
+ 1982,
167
+ 2460,
168
+ 2627,
169
+ 3246,
170
+ 3253,
171
+ 3268,
172
+ 3536,
173
+ 3846,
174
+ 3961,
175
+ 4183,
176
+ 4667,
177
+ 6585,
178
+ 6647,
179
+ 7273,
180
+ 9061,
181
+ 9383,
182
+ 10428,
183
+ 10929,
184
+ 11938,
185
+ 12033,
186
+ 12331,
187
+ 12562,
188
+ 13793,
189
+ 14157,
190
+ 14635,
191
+ 15265,
192
+ 15618,
193
+ 16553,
194
+ 16604,
195
+ 18362,
196
+ 18956,
197
+ 20075,
198
+ 21675,
199
+ 22520,
200
+ 26130,
201
+ 26161,
202
+ 26435,
203
+ 28279,
204
+ 29464,
205
+ 31650,
206
+ 32302,
207
+ 32470,
208
+ 36865,
209
+ 42863,
210
+ 47425,
211
+ 49870,
212
+ 50254,
213
+ 50258,
214
+ 50358,
215
+ 50359,
216
+ 50360,
217
+ 50361,
218
+ 50362
219
+ ],
220
+ "task_to_id": {
221
+ "transcribe": 50359,
222
+ "translate": 50358
223
+ },
224
+ "transformers_version": "4.27.0.dev0"
225
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
normalizer.json ADDED
@@ -0,0 +1,1742 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "accessorise": "accessorize",
3
+ "accessorised": "accessorized",
4
+ "accessorises": "accessorizes",
5
+ "accessorising": "accessorizing",
6
+ "acclimatisation": "acclimatization",
7
+ "acclimatise": "acclimatize",
8
+ "acclimatised": "acclimatized",
9
+ "acclimatises": "acclimatizes",
10
+ "acclimatising": "acclimatizing",
11
+ "accoutrements": "accouterments",
12
+ "aeon": "eon",
13
+ "aeons": "eons",
14
+ "aerogramme": "aerogram",
15
+ "aerogrammes": "aerograms",
16
+ "aeroplane": "airplane",
17
+ "aeroplanes": "airplanes",
18
+ "aesthete": "esthete",
19
+ "aesthetes": "esthetes",
20
+ "aesthetic": "esthetic",
21
+ "aesthetically": "esthetically",
22
+ "aesthetics": "esthetics",
23
+ "aetiology": "etiology",
24
+ "ageing": "aging",
25
+ "aggrandisement": "aggrandizement",
26
+ "agonise": "agonize",
27
+ "agonised": "agonized",
28
+ "agonises": "agonizes",
29
+ "agonising": "agonizing",
30
+ "agonisingly": "agonizingly",
31
+ "almanack": "almanac",
32
+ "almanacks": "almanacs",
33
+ "aluminium": "aluminum",
34
+ "amortisable": "amortizable",
35
+ "amortisation": "amortization",
36
+ "amortisations": "amortizations",
37
+ "amortise": "amortize",
38
+ "amortised": "amortized",
39
+ "amortises": "amortizes",
40
+ "amortising": "amortizing",
41
+ "amphitheatre": "amphitheater",
42
+ "amphitheatres": "amphitheaters",
43
+ "anaemia": "anemia",
44
+ "anaemic": "anemic",
45
+ "anaesthesia": "anesthesia",
46
+ "anaesthetic": "anesthetic",
47
+ "anaesthetics": "anesthetics",
48
+ "anaesthetise": "anesthetize",
49
+ "anaesthetised": "anesthetized",
50
+ "anaesthetises": "anesthetizes",
51
+ "anaesthetising": "anesthetizing",
52
+ "anaesthetist": "anesthetist",
53
+ "anaesthetists": "anesthetists",
54
+ "anaesthetize": "anesthetize",
55
+ "anaesthetized": "anesthetized",
56
+ "anaesthetizes": "anesthetizes",
57
+ "anaesthetizing": "anesthetizing",
58
+ "analogue": "analog",
59
+ "analogues": "analogs",
60
+ "analyse": "analyze",
61
+ "analysed": "analyzed",
62
+ "analyses": "analyzes",
63
+ "analysing": "analyzing",
64
+ "anglicise": "anglicize",
65
+ "anglicised": "anglicized",
66
+ "anglicises": "anglicizes",
67
+ "anglicising": "anglicizing",
68
+ "annualised": "annualized",
69
+ "antagonise": "antagonize",
70
+ "antagonised": "antagonized",
71
+ "antagonises": "antagonizes",
72
+ "antagonising": "antagonizing",
73
+ "apologise": "apologize",
74
+ "apologised": "apologized",
75
+ "apologises": "apologizes",
76
+ "apologising": "apologizing",
77
+ "appal": "appall",
78
+ "appals": "appalls",
79
+ "appetiser": "appetizer",
80
+ "appetisers": "appetizers",
81
+ "appetising": "appetizing",
82
+ "appetisingly": "appetizingly",
83
+ "arbour": "arbor",
84
+ "arbours": "arbors",
85
+ "archaeologically": "archeologically",
86
+ "archaeologist": "archeologist",
87
+ "archaeologists": "archeologists",
88
+ "archaeology": "archeology",
89
+ "archeological": "archaeological",
90
+ "ardour": "ardor",
91
+ "armour": "armor",
92
+ "armoured": "armored",
93
+ "armourer": "armorer",
94
+ "armourers": "armorers",
95
+ "armouries": "armories",
96
+ "armoury": "armory",
97
+ "artefact": "artifact",
98
+ "artefacts": "artifacts",
99
+ "authorise": "authorize",
100
+ "authorised": "authorized",
101
+ "authorises": "authorizes",
102
+ "authorising": "authorizing",
103
+ "axe": "ax",
104
+ "backpedalled": "backpedaled",
105
+ "backpedalling": "backpedaling",
106
+ "bannister": "banister",
107
+ "bannisters": "banisters",
108
+ "baptise": "baptize",
109
+ "baptised": "baptized",
110
+ "baptises": "baptizes",
111
+ "baptising": "baptizing",
112
+ "bastardise": "bastardize",
113
+ "bastardised": "bastardized",
114
+ "bastardises": "bastardizes",
115
+ "bastardising": "bastardizing",
116
+ "battleax": "battleaxe",
117
+ "baulk": "balk",
118
+ "baulked": "balked",
119
+ "baulking": "balking",
120
+ "baulks": "balks",
121
+ "bedevilled": "bedeviled",
122
+ "bedevilling": "bedeviling",
123
+ "behaviour": "behavior",
124
+ "behavioural": "behavioral",
125
+ "behaviourism": "behaviorism",
126
+ "behaviourist": "behaviorist",
127
+ "behaviourists": "behaviorists",
128
+ "behaviours": "behaviors",
129
+ "behove": "behoove",
130
+ "behoved": "behooved",
131
+ "behoves": "behooves",
132
+ "bejewelled": "bejeweled",
133
+ "belabour": "belabor",
134
+ "belaboured": "belabored",
135
+ "belabouring": "belaboring",
136
+ "belabours": "belabors",
137
+ "bevelled": "beveled",
138
+ "bevvies": "bevies",
139
+ "bevvy": "bevy",
140
+ "biassed": "biased",
141
+ "biassing": "biasing",
142
+ "bingeing": "binging",
143
+ "bougainvillaea": "bougainvillea",
144
+ "bougainvillaeas": "bougainvilleas",
145
+ "bowdlerise": "bowdlerize",
146
+ "bowdlerised": "bowdlerized",
147
+ "bowdlerises": "bowdlerizes",
148
+ "bowdlerising": "bowdlerizing",
149
+ "breathalyse": "breathalyze",
150
+ "breathalysed": "breathalyzed",
151
+ "breathalyser": "breathalyzer",
152
+ "breathalysers": "breathalyzers",
153
+ "breathalyses": "breathalyzes",
154
+ "breathalysing": "breathalyzing",
155
+ "brutalise": "brutalize",
156
+ "brutalised": "brutalized",
157
+ "brutalises": "brutalizes",
158
+ "brutalising": "brutalizing",
159
+ "busses": "buses",
160
+ "bussing": "busing",
161
+ "caesarean": "cesarean",
162
+ "caesareans": "cesareans",
163
+ "calibre": "caliber",
164
+ "calibres": "calibers",
165
+ "calliper": "caliper",
166
+ "callipers": "calipers",
167
+ "callisthenics": "calisthenics",
168
+ "canalise": "canalize",
169
+ "canalised": "canalized",
170
+ "canalises": "canalizes",
171
+ "canalising": "canalizing",
172
+ "cancelation": "cancellation",
173
+ "cancelations": "cancellations",
174
+ "cancelled": "canceled",
175
+ "cancelling": "canceling",
176
+ "candour": "candor",
177
+ "cannibalise": "cannibalize",
178
+ "cannibalised": "cannibalized",
179
+ "cannibalises": "cannibalizes",
180
+ "cannibalising": "cannibalizing",
181
+ "canonise": "canonize",
182
+ "canonised": "canonized",
183
+ "canonises": "canonizes",
184
+ "canonising": "canonizing",
185
+ "capitalise": "capitalize",
186
+ "capitalised": "capitalized",
187
+ "capitalises": "capitalizes",
188
+ "capitalising": "capitalizing",
189
+ "caramelise": "caramelize",
190
+ "caramelised": "caramelized",
191
+ "caramelises": "caramelizes",
192
+ "caramelising": "caramelizing",
193
+ "carbonise": "carbonize",
194
+ "carbonised": "carbonized",
195
+ "carbonises": "carbonizes",
196
+ "carbonising": "carbonizing",
197
+ "carolled": "caroled",
198
+ "carolling": "caroling",
199
+ "catalogue": "catalog",
200
+ "catalogued": "cataloged",
201
+ "catalogues": "catalogs",
202
+ "cataloguing": "cataloging",
203
+ "catalyse": "catalyze",
204
+ "catalysed": "catalyzed",
205
+ "catalyses": "catalyzes",
206
+ "catalysing": "catalyzing",
207
+ "categorise": "categorize",
208
+ "categorised": "categorized",
209
+ "categorises": "categorizes",
210
+ "categorising": "categorizing",
211
+ "cauterise": "cauterize",
212
+ "cauterised": "cauterized",
213
+ "cauterises": "cauterizes",
214
+ "cauterising": "cauterizing",
215
+ "cavilled": "caviled",
216
+ "cavilling": "caviling",
217
+ "centigramme": "centigram",
218
+ "centigrammes": "centigrams",
219
+ "centilitre": "centiliter",
220
+ "centilitres": "centiliters",
221
+ "centimetre": "centimeter",
222
+ "centimetres": "centimeters",
223
+ "centralise": "centralize",
224
+ "centralised": "centralized",
225
+ "centralises": "centralizes",
226
+ "centralising": "centralizing",
227
+ "centre": "center",
228
+ "centred": "centered",
229
+ "centrefold": "centerfold",
230
+ "centrefolds": "centerfolds",
231
+ "centrepiece": "centerpiece",
232
+ "centrepieces": "centerpieces",
233
+ "centres": "centers",
234
+ "channelled": "channeled",
235
+ "channelling": "channeling",
236
+ "characterise": "characterize",
237
+ "characterised": "characterized",
238
+ "characterises": "characterizes",
239
+ "characterising": "characterizing",
240
+ "cheque": "check",
241
+ "chequebook": "checkbook",
242
+ "chequebooks": "checkbooks",
243
+ "chequered": "checkered",
244
+ "cheques": "checks",
245
+ "chilli": "chili",
246
+ "chimaera": "chimera",
247
+ "chimaeras": "chimeras",
248
+ "chiselled": "chiseled",
249
+ "chiselling": "chiseling",
250
+ "circularise": "circularize",
251
+ "circularised": "circularized",
252
+ "circularises": "circularizes",
253
+ "circularising": "circularizing",
254
+ "civilise": "civilize",
255
+ "civilised": "civilized",
256
+ "civilises": "civilizes",
257
+ "civilising": "civilizing",
258
+ "clamour": "clamor",
259
+ "clamoured": "clamored",
260
+ "clamouring": "clamoring",
261
+ "clamours": "clamors",
262
+ "clangour": "clangor",
263
+ "clarinettist": "clarinetist",
264
+ "clarinettists": "clarinetists",
265
+ "collectivise": "collectivize",
266
+ "collectivised": "collectivized",
267
+ "collectivises": "collectivizes",
268
+ "collectivising": "collectivizing",
269
+ "colonisation": "colonization",
270
+ "colonise": "colonize",
271
+ "colonised": "colonized",
272
+ "coloniser": "colonizer",
273
+ "colonisers": "colonizers",
274
+ "colonises": "colonizes",
275
+ "colonising": "colonizing",
276
+ "colour": "color",
277
+ "colourant": "colorant",
278
+ "colourants": "colorants",
279
+ "coloured": "colored",
280
+ "coloureds": "coloreds",
281
+ "colourful": "colorful",
282
+ "colourfully": "colorfully",
283
+ "colouring": "coloring",
284
+ "colourize": "colorize",
285
+ "colourized": "colorized",
286
+ "colourizes": "colorizes",
287
+ "colourizing": "colorizing",
288
+ "colourless": "colorless",
289
+ "colours": "colors",
290
+ "commercialise": "commercialize",
291
+ "commercialised": "commercialized",
292
+ "commercialises": "commercializes",
293
+ "commercialising": "commercializing",
294
+ "compartmentalise": "compartmentalize",
295
+ "compartmentalised": "compartmentalized",
296
+ "compartmentalises": "compartmentalizes",
297
+ "compartmentalising": "compartmentalizing",
298
+ "computerise": "computerize",
299
+ "computerised": "computerized",
300
+ "computerises": "computerizes",
301
+ "computerising": "computerizing",
302
+ "conceptualise": "conceptualize",
303
+ "conceptualised": "conceptualized",
304
+ "conceptualises": "conceptualizes",
305
+ "conceptualising": "conceptualizing",
306
+ "connexion": "connection",
307
+ "connexions": "connections",
308
+ "contextualise": "contextualize",
309
+ "contextualised": "contextualized",
310
+ "contextualises": "contextualizes",
311
+ "contextualising": "contextualizing",
312
+ "cosier": "cozier",
313
+ "cosies": "cozies",
314
+ "cosiest": "coziest",
315
+ "cosily": "cozily",
316
+ "cosiness": "coziness",
317
+ "cosy": "cozy",
318
+ "councillor": "councilor",
319
+ "councillors": "councilors",
320
+ "counselled": "counseled",
321
+ "counselling": "counseling",
322
+ "counsellor": "counselor",
323
+ "counsellors": "counselors",
324
+ "crenelated": "crenellated",
325
+ "criminalise": "criminalize",
326
+ "criminalised": "criminalized",
327
+ "criminalises": "criminalizes",
328
+ "criminalising": "criminalizing",
329
+ "criticise": "criticize",
330
+ "criticised": "criticized",
331
+ "criticises": "criticizes",
332
+ "criticising": "criticizing",
333
+ "crueller": "crueler",
334
+ "cruellest": "cruelest",
335
+ "crystallisation": "crystallization",
336
+ "crystallise": "crystallize",
337
+ "crystallised": "crystallized",
338
+ "crystallises": "crystallizes",
339
+ "crystallising": "crystallizing",
340
+ "cudgelled": "cudgeled",
341
+ "cudgelling": "cudgeling",
342
+ "customise": "customize",
343
+ "customised": "customized",
344
+ "customises": "customizes",
345
+ "customising": "customizing",
346
+ "cypher": "cipher",
347
+ "cyphers": "ciphers",
348
+ "decentralisation": "decentralization",
349
+ "decentralise": "decentralize",
350
+ "decentralised": "decentralized",
351
+ "decentralises": "decentralizes",
352
+ "decentralising": "decentralizing",
353
+ "decriminalisation": "decriminalization",
354
+ "decriminalise": "decriminalize",
355
+ "decriminalised": "decriminalized",
356
+ "decriminalises": "decriminalizes",
357
+ "decriminalising": "decriminalizing",
358
+ "defence": "defense",
359
+ "defenceless": "defenseless",
360
+ "defences": "defenses",
361
+ "dehumanisation": "dehumanization",
362
+ "dehumanise": "dehumanize",
363
+ "dehumanised": "dehumanized",
364
+ "dehumanises": "dehumanizes",
365
+ "dehumanising": "dehumanizing",
366
+ "demeanour": "demeanor",
367
+ "demilitarisation": "demilitarization",
368
+ "demilitarise": "demilitarize",
369
+ "demilitarised": "demilitarized",
370
+ "demilitarises": "demilitarizes",
371
+ "demilitarising": "demilitarizing",
372
+ "demobilisation": "demobilization",
373
+ "demobilise": "demobilize",
374
+ "demobilised": "demobilized",
375
+ "demobilises": "demobilizes",
376
+ "demobilising": "demobilizing",
377
+ "democratisation": "democratization",
378
+ "democratise": "democratize",
379
+ "democratised": "democratized",
380
+ "democratises": "democratizes",
381
+ "democratising": "democratizing",
382
+ "demonise": "demonize",
383
+ "demonised": "demonized",
384
+ "demonises": "demonizes",
385
+ "demonising": "demonizing",
386
+ "demoralisation": "demoralization",
387
+ "demoralise": "demoralize",
388
+ "demoralised": "demoralized",
389
+ "demoralises": "demoralizes",
390
+ "demoralising": "demoralizing",
391
+ "denationalisation": "denationalization",
392
+ "denationalise": "denationalize",
393
+ "denationalised": "denationalized",
394
+ "denationalises": "denationalizes",
395
+ "denationalising": "denationalizing",
396
+ "deodorise": "deodorize",
397
+ "deodorised": "deodorized",
398
+ "deodorises": "deodorizes",
399
+ "deodorising": "deodorizing",
400
+ "depersonalise": "depersonalize",
401
+ "depersonalised": "depersonalized",
402
+ "depersonalises": "depersonalizes",
403
+ "depersonalising": "depersonalizing",
404
+ "deputise": "deputize",
405
+ "deputised": "deputized",
406
+ "deputises": "deputizes",
407
+ "deputising": "deputizing",
408
+ "desensitisation": "desensitization",
409
+ "desensitise": "desensitize",
410
+ "desensitised": "desensitized",
411
+ "desensitises": "desensitizes",
412
+ "desensitising": "desensitizing",
413
+ "destabilisation": "destabilization",
414
+ "destabilise": "destabilize",
415
+ "destabilised": "destabilized",
416
+ "destabilises": "destabilizes",
417
+ "destabilising": "destabilizing",
418
+ "dialled": "dialed",
419
+ "dialling": "dialing",
420
+ "dialogue": "dialog",
421
+ "dialogues": "dialogs",
422
+ "diarrhoea": "diarrhea",
423
+ "digitise": "digitize",
424
+ "digitised": "digitized",
425
+ "digitises": "digitizes",
426
+ "digitising": "digitizing",
427
+ "disc": "disk",
428
+ "discolour": "discolor",
429
+ "discoloured": "discolored",
430
+ "discolouring": "discoloring",
431
+ "discolours": "discolors",
432
+ "discs": "disks",
433
+ "disembowelled": "disemboweled",
434
+ "disembowelling": "disemboweling",
435
+ "disfavour": "disfavor",
436
+ "dishevelled": "disheveled",
437
+ "dishonour": "dishonor",
438
+ "dishonourable": "dishonorable",
439
+ "dishonourably": "dishonorably",
440
+ "dishonoured": "dishonored",
441
+ "dishonouring": "dishonoring",
442
+ "dishonours": "dishonors",
443
+ "disorganisation": "disorganization",
444
+ "disorganised": "disorganized",
445
+ "distil": "distill",
446
+ "distils": "distills",
447
+ "dramatisation": "dramatization",
448
+ "dramatisations": "dramatizations",
449
+ "dramatise": "dramatize",
450
+ "dramatised": "dramatized",
451
+ "dramatises": "dramatizes",
452
+ "dramatising": "dramatizing",
453
+ "draught": "draft",
454
+ "draughtboard": "draftboard",
455
+ "draughtboards": "draftboards",
456
+ "draughtier": "draftier",
457
+ "draughtiest": "draftiest",
458
+ "draughts": "drafts",
459
+ "draughtsman": "draftsman",
460
+ "draughtsmanship": "draftsmanship",
461
+ "draughtsmen": "draftsmen",
462
+ "draughtswoman": "draftswoman",
463
+ "draughtswomen": "draftswomen",
464
+ "draughty": "drafty",
465
+ "drivelled": "driveled",
466
+ "drivelling": "driveling",
467
+ "duelled": "dueled",
468
+ "duelling": "dueling",
469
+ "economise": "economize",
470
+ "economised": "economized",
471
+ "economises": "economizes",
472
+ "economising": "economizing",
473
+ "editorialise": "editorialize",
474
+ "editorialised": "editorialized",
475
+ "editorialises": "editorializes",
476
+ "editorialising": "editorializing",
477
+ "edoema": "edema",
478
+ "empathise": "empathize",
479
+ "empathised": "empathized",
480
+ "empathises": "empathizes",
481
+ "empathising": "empathizing",
482
+ "emphasise": "emphasize",
483
+ "emphasised": "emphasized",
484
+ "emphasises": "emphasizes",
485
+ "emphasising": "emphasizing",
486
+ "enamelled": "enameled",
487
+ "enamelling": "enameling",
488
+ "enamoured": "enamored",
489
+ "encyclopaedia": "encyclopedia",
490
+ "encyclopaedias": "encyclopedias",
491
+ "encyclopaedic": "encyclopedic",
492
+ "endeavour": "endeavor",
493
+ "endeavoured": "endeavored",
494
+ "endeavouring": "endeavoring",
495
+ "endeavours": "endeavors",
496
+ "energise": "energize",
497
+ "energised": "energized",
498
+ "energises": "energizes",
499
+ "energising": "energizing",
500
+ "enrol": "enroll",
501
+ "enrols": "enrolls",
502
+ "enthral": "enthrall",
503
+ "enthrals": "enthralls",
504
+ "epaulette": "epaulet",
505
+ "epaulettes": "epaulets",
506
+ "epicentre": "epicenter",
507
+ "epicentres": "epicenters",
508
+ "epilogue": "epilog",
509
+ "epilogues": "epilogs",
510
+ "epitomise": "epitomize",
511
+ "epitomised": "epitomized",
512
+ "epitomises": "epitomizes",
513
+ "epitomising": "epitomizing",
514
+ "equalisation": "equalization",
515
+ "equalise": "equalize",
516
+ "equalised": "equalized",
517
+ "equaliser": "equalizer",
518
+ "equalisers": "equalizers",
519
+ "equalises": "equalizes",
520
+ "equalising": "equalizing",
521
+ "eulogise": "eulogize",
522
+ "eulogised": "eulogized",
523
+ "eulogises": "eulogizes",
524
+ "eulogising": "eulogizing",
525
+ "evangelise": "evangelize",
526
+ "evangelised": "evangelized",
527
+ "evangelises": "evangelizes",
528
+ "evangelising": "evangelizing",
529
+ "exorcise": "exorcize",
530
+ "exorcised": "exorcized",
531
+ "exorcises": "exorcizes",
532
+ "exorcising": "exorcizing",
533
+ "extemporisation": "extemporization",
534
+ "extemporise": "extemporize",
535
+ "extemporised": "extemporized",
536
+ "extemporises": "extemporizes",
537
+ "extemporising": "extemporizing",
538
+ "externalisation": "externalization",
539
+ "externalisations": "externalizations",
540
+ "externalise": "externalize",
541
+ "externalised": "externalized",
542
+ "externalises": "externalizes",
543
+ "externalising": "externalizing",
544
+ "factorise": "factorize",
545
+ "factorised": "factorized",
546
+ "factorises": "factorizes",
547
+ "factorising": "factorizing",
548
+ "faecal": "fecal",
549
+ "faeces": "feces",
550
+ "familiarisation": "familiarization",
551
+ "familiarise": "familiarize",
552
+ "familiarised": "familiarized",
553
+ "familiarises": "familiarizes",
554
+ "familiarising": "familiarizing",
555
+ "fantasise": "fantasize",
556
+ "fantasised": "fantasized",
557
+ "fantasises": "fantasizes",
558
+ "fantasising": "fantasizing",
559
+ "favour": "favor",
560
+ "favourable": "favorable",
561
+ "favourably": "favorably",
562
+ "favoured": "favored",
563
+ "favouring": "favoring",
564
+ "favourite": "favorite",
565
+ "favourites": "favorites",
566
+ "favouritism": "favoritism",
567
+ "favours": "favors",
568
+ "feminise": "feminize",
569
+ "feminised": "feminized",
570
+ "feminises": "feminizes",
571
+ "feminising": "feminizing",
572
+ "fertilisation": "fertilization",
573
+ "fertilise": "fertilize",
574
+ "fertilised": "fertilized",
575
+ "fertiliser": "fertilizer",
576
+ "fertilisers": "fertilizers",
577
+ "fertilises": "fertilizes",
578
+ "fertilising": "fertilizing",
579
+ "fervour": "fervor",
580
+ "fibre": "fiber",
581
+ "fibreglass": "fiberglass",
582
+ "fibres": "fibers",
583
+ "fictionalisation": "fictionalization",
584
+ "fictionalisations": "fictionalizations",
585
+ "fictionalise": "fictionalize",
586
+ "fictionalised": "fictionalized",
587
+ "fictionalises": "fictionalizes",
588
+ "fictionalising": "fictionalizing",
589
+ "fillet": "filet",
590
+ "filleted": "fileted",
591
+ "filleting": "fileting",
592
+ "fillets": "filets",
593
+ "finalisation": "finalization",
594
+ "finalise": "finalize",
595
+ "finalised": "finalized",
596
+ "finalises": "finalizes",
597
+ "finalising": "finalizing",
598
+ "flautist": "flutist",
599
+ "flautists": "flutists",
600
+ "flavour": "flavor",
601
+ "flavoured": "flavored",
602
+ "flavouring": "flavoring",
603
+ "flavourings": "flavorings",
604
+ "flavourless": "flavorless",
605
+ "flavours": "flavors",
606
+ "flavoursome": "flavorsome",
607
+ "flyer / flier": "flier / flyer",
608
+ "foetal": "fetal",
609
+ "foetid": "fetid",
610
+ "foetus": "fetus",
611
+ "foetuses": "fetuses",
612
+ "formalisation": "formalization",
613
+ "formalise": "formalize",
614
+ "formalised": "formalized",
615
+ "formalises": "formalizes",
616
+ "formalising": "formalizing",
617
+ "fossilisation": "fossilization",
618
+ "fossilise": "fossilize",
619
+ "fossilised": "fossilized",
620
+ "fossilises": "fossilizes",
621
+ "fossilising": "fossilizing",
622
+ "fraternisation": "fraternization",
623
+ "fraternise": "fraternize",
624
+ "fraternised": "fraternized",
625
+ "fraternises": "fraternizes",
626
+ "fraternising": "fraternizing",
627
+ "fulfil": "fulfill",
628
+ "fulfilment": "fulfillment",
629
+ "fulfils": "fulfills",
630
+ "funnelled": "funneled",
631
+ "funnelling": "funneling",
632
+ "gage": "gauge",
633
+ "gaged": "gauged",
634
+ "gages": "gauges",
635
+ "gaging": "gauging",
636
+ "galvanise": "galvanize",
637
+ "galvanised": "galvanized",
638
+ "galvanises": "galvanizes",
639
+ "galvanising": "galvanizing",
640
+ "gambolled": "gamboled",
641
+ "gambolling": "gamboling",
642
+ "gaol": "jail",
643
+ "gaolbird": "jailbird",
644
+ "gaolbirds": "jailbirds",
645
+ "gaolbreak": "jailbreak",
646
+ "gaolbreaks": "jailbreaks",
647
+ "gaoled": "jailed",
648
+ "gaoler": "jailer",
649
+ "gaolers": "jailers",
650
+ "gaoling": "jailing",
651
+ "gaols": "jails",
652
+ "gasses": "gases",
653
+ "generalisation": "generalization",
654
+ "generalisations": "generalizations",
655
+ "generalise": "generalize",
656
+ "generalised": "generalized",
657
+ "generalises": "generalizes",
658
+ "generalising": "generalizing",
659
+ "ghettoise": "ghettoize",
660
+ "ghettoised": "ghettoized",
661
+ "ghettoises": "ghettoizes",
662
+ "ghettoising": "ghettoizing",
663
+ "gipsies": "gypsies",
664
+ "glamor": "glamour",
665
+ "glamorise": "glamorize",
666
+ "glamorised": "glamorized",
667
+ "glamorises": "glamorizes",
668
+ "glamorising": "glamorizing",
669
+ "globalisation": "globalization",
670
+ "globalise": "globalize",
671
+ "globalised": "globalized",
672
+ "globalises": "globalizes",
673
+ "globalising": "globalizing",
674
+ "glueing": "gluing",
675
+ "goitre": "goiter",
676
+ "goitres": "goiters",
677
+ "gonorrhoea": "gonorrhea",
678
+ "gramme": "gram",
679
+ "grammes": "grams",
680
+ "gravelled": "graveled",
681
+ "grey": "gray",
682
+ "greyed": "grayed",
683
+ "greying": "graying",
684
+ "greyish": "grayish",
685
+ "greyness": "grayness",
686
+ "greys": "grays",
687
+ "grovelled": "groveled",
688
+ "grovelling": "groveling",
689
+ "groyne": "groin",
690
+ "groynes": "groins",
691
+ "gruelling": "grueling",
692
+ "gruellingly": "gruelingly",
693
+ "gryphon": "griffin",
694
+ "gryphons": "griffins",
695
+ "gynaecological": "gynecological",
696
+ "gynaecologist": "gynecologist",
697
+ "gynaecologists": "gynecologists",
698
+ "gynaecology": "gynecology",
699
+ "haematological": "hematological",
700
+ "haematologist": "hematologist",
701
+ "haematologists": "hematologists",
702
+ "haematology": "hematology",
703
+ "haemoglobin": "hemoglobin",
704
+ "haemophilia": "hemophilia",
705
+ "haemophiliac": "hemophiliac",
706
+ "haemophiliacs": "hemophiliacs",
707
+ "haemorrhage": "hemorrhage",
708
+ "haemorrhaged": "hemorrhaged",
709
+ "haemorrhages": "hemorrhages",
710
+ "haemorrhaging": "hemorrhaging",
711
+ "haemorrhoids": "hemorrhoids",
712
+ "harbour": "harbor",
713
+ "harboured": "harbored",
714
+ "harbouring": "harboring",
715
+ "harbours": "harbors",
716
+ "harmonisation": "harmonization",
717
+ "harmonise": "harmonize",
718
+ "harmonised": "harmonized",
719
+ "harmonises": "harmonizes",
720
+ "harmonising": "harmonizing",
721
+ "homoeopath": "homeopath",
722
+ "homoeopathic": "homeopathic",
723
+ "homoeopaths": "homeopaths",
724
+ "homoeopathy": "homeopathy",
725
+ "homogenise": "homogenize",
726
+ "homogenised": "homogenized",
727
+ "homogenises": "homogenizes",
728
+ "homogenising": "homogenizing",
729
+ "honour": "honor",
730
+ "honourable": "honorable",
731
+ "honourably": "honorably",
732
+ "honoured": "honored",
733
+ "honouring": "honoring",
734
+ "honours": "honors",
735
+ "hospitalisation": "hospitalization",
736
+ "hospitalise": "hospitalize",
737
+ "hospitalised": "hospitalized",
738
+ "hospitalises": "hospitalizes",
739
+ "hospitalising": "hospitalizing",
740
+ "humanise": "humanize",
741
+ "humanised": "humanized",
742
+ "humanises": "humanizes",
743
+ "humanising": "humanizing",
744
+ "humour": "humor",
745
+ "humoured": "humored",
746
+ "humouring": "humoring",
747
+ "humourless": "humorless",
748
+ "humours": "humors",
749
+ "hybridise": "hybridize",
750
+ "hybridised": "hybridized",
751
+ "hybridises": "hybridizes",
752
+ "hybridising": "hybridizing",
753
+ "hypnotise": "hypnotize",
754
+ "hypnotised": "hypnotized",
755
+ "hypnotises": "hypnotizes",
756
+ "hypnotising": "hypnotizing",
757
+ "hypothesise": "hypothesize",
758
+ "hypothesised": "hypothesized",
759
+ "hypothesises": "hypothesizes",
760
+ "hypothesising": "hypothesizing",
761
+ "idealisation": "idealization",
762
+ "idealise": "idealize",
763
+ "idealised": "idealized",
764
+ "idealises": "idealizes",
765
+ "idealising": "idealizing",
766
+ "idolise": "idolize",
767
+ "idolised": "idolized",
768
+ "idolises": "idolizes",
769
+ "idolising": "idolizing",
770
+ "immobilisation": "immobilization",
771
+ "immobilise": "immobilize",
772
+ "immobilised": "immobilized",
773
+ "immobiliser": "immobilizer",
774
+ "immobilisers": "immobilizers",
775
+ "immobilises": "immobilizes",
776
+ "immobilising": "immobilizing",
777
+ "immortalise": "immortalize",
778
+ "immortalised": "immortalized",
779
+ "immortalises": "immortalizes",
780
+ "immortalising": "immortalizing",
781
+ "immunisation": "immunization",
782
+ "immunise": "immunize",
783
+ "immunised": "immunized",
784
+ "immunises": "immunizes",
785
+ "immunising": "immunizing",
786
+ "impanelled": "impaneled",
787
+ "impanelling": "impaneling",
788
+ "imperilled": "imperiled",
789
+ "imperilling": "imperiling",
790
+ "individualise": "individualize",
791
+ "individualised": "individualized",
792
+ "individualises": "individualizes",
793
+ "individualising": "individualizing",
794
+ "industrialise": "industrialize",
795
+ "industrialised": "industrialized",
796
+ "industrialises": "industrializes",
797
+ "industrialising": "industrializing",
798
+ "inflexion": "inflection",
799
+ "inflexions": "inflections",
800
+ "initialise": "initialize",
801
+ "initialised": "initialized",
802
+ "initialises": "initializes",
803
+ "initialising": "initializing",
804
+ "initialled": "initialed",
805
+ "initialling": "initialing",
806
+ "instal": "install",
807
+ "instalment": "installment",
808
+ "instalments": "installments",
809
+ "instals": "installs",
810
+ "instil": "instill",
811
+ "instils": "instills",
812
+ "institutionalisation": "institutionalization",
813
+ "institutionalise": "institutionalize",
814
+ "institutionalised": "institutionalized",
815
+ "institutionalises": "institutionalizes",
816
+ "institutionalising": "institutionalizing",
817
+ "intellectualise": "intellectualize",
818
+ "intellectualised": "intellectualized",
819
+ "intellectualises": "intellectualizes",
820
+ "intellectualising": "intellectualizing",
821
+ "internalisation": "internalization",
822
+ "internalise": "internalize",
823
+ "internalised": "internalized",
824
+ "internalises": "internalizes",
825
+ "internalising": "internalizing",
826
+ "internationalisation": "internationalization",
827
+ "internationalise": "internationalize",
828
+ "internationalised": "internationalized",
829
+ "internationalises": "internationalizes",
830
+ "internationalising": "internationalizing",
831
+ "ionisation": "ionization",
832
+ "ionise": "ionize",
833
+ "ionised": "ionized",
834
+ "ioniser": "ionizer",
835
+ "ionisers": "ionizers",
836
+ "ionises": "ionizes",
837
+ "ionising": "ionizing",
838
+ "italicise": "italicize",
839
+ "italicised": "italicized",
840
+ "italicises": "italicizes",
841
+ "italicising": "italicizing",
842
+ "itemise": "itemize",
843
+ "itemised": "itemized",
844
+ "itemises": "itemizes",
845
+ "itemising": "itemizing",
846
+ "jeopardise": "jeopardize",
847
+ "jeopardised": "jeopardized",
848
+ "jeopardises": "jeopardizes",
849
+ "jeopardising": "jeopardizing",
850
+ "jewelled": "jeweled",
851
+ "jeweller": "jeweler",
852
+ "jewellers": "jewelers",
853
+ "jewellery": "jewelry",
854
+ "judgement": "judgment",
855
+ "kilogramme": "kilogram",
856
+ "kilogrammes": "kilograms",
857
+ "kilometre": "kilometer",
858
+ "kilometres": "kilometers",
859
+ "labelled": "labeled",
860
+ "labelling": "labeling",
861
+ "labour": "labor",
862
+ "laboured": "labored",
863
+ "labourer": "laborer",
864
+ "labourers": "laborers",
865
+ "labouring": "laboring",
866
+ "labours": "labors",
867
+ "lacklustre": "lackluster",
868
+ "legalisation": "legalization",
869
+ "legalise": "legalize",
870
+ "legalised": "legalized",
871
+ "legalises": "legalizes",
872
+ "legalising": "legalizing",
873
+ "legitimise": "legitimize",
874
+ "legitimised": "legitimized",
875
+ "legitimises": "legitimizes",
876
+ "legitimising": "legitimizing",
877
+ "leukaemia": "leukemia",
878
+ "levelled": "leveled",
879
+ "leveller": "leveler",
880
+ "levellers": "levelers",
881
+ "levelling": "leveling",
882
+ "libelled": "libeled",
883
+ "libelling": "libeling",
884
+ "libellous": "libelous",
885
+ "liberalisation": "liberalization",
886
+ "liberalise": "liberalize",
887
+ "liberalised": "liberalized",
888
+ "liberalises": "liberalizes",
889
+ "liberalising": "liberalizing",
890
+ "licence": "license",
891
+ "licenced": "licensed",
892
+ "licences": "licenses",
893
+ "licencing": "licensing",
894
+ "likeable": "likable",
895
+ "lionisation": "lionization",
896
+ "lionise": "lionize",
897
+ "lionised": "lionized",
898
+ "lionises": "lionizes",
899
+ "lionising": "lionizing",
900
+ "liquidise": "liquidize",
901
+ "liquidised": "liquidized",
902
+ "liquidiser": "liquidizer",
903
+ "liquidisers": "liquidizers",
904
+ "liquidises": "liquidizes",
905
+ "liquidising": "liquidizing",
906
+ "litre": "liter",
907
+ "litres": "liters",
908
+ "localise": "localize",
909
+ "localised": "localized",
910
+ "localises": "localizes",
911
+ "localising": "localizing",
912
+ "louvre": "louver",
913
+ "louvred": "louvered",
914
+ "louvres": "louvers",
915
+ "lustre": "luster",
916
+ "magnetise": "magnetize",
917
+ "magnetised": "magnetized",
918
+ "magnetises": "magnetizes",
919
+ "magnetising": "magnetizing",
920
+ "manoeuvrability": "maneuverability",
921
+ "manoeuvrable": "maneuverable",
922
+ "manoeuvre": "maneuver",
923
+ "manoeuvred": "maneuvered",
924
+ "manoeuvres": "maneuvers",
925
+ "manoeuvring": "maneuvering",
926
+ "manoeuvrings": "maneuverings",
927
+ "marginalisation": "marginalization",
928
+ "marginalise": "marginalize",
929
+ "marginalised": "marginalized",
930
+ "marginalises": "marginalizes",
931
+ "marginalising": "marginalizing",
932
+ "marshalled": "marshaled",
933
+ "marshalling": "marshaling",
934
+ "marvelled": "marveled",
935
+ "marvelling": "marveling",
936
+ "marvellous": "marvelous",
937
+ "marvellously": "marvelously",
938
+ "materialisation": "materialization",
939
+ "materialise": "materialize",
940
+ "materialised": "materialized",
941
+ "materialises": "materializes",
942
+ "materialising": "materializing",
943
+ "maximisation": "maximization",
944
+ "maximise": "maximize",
945
+ "maximised": "maximized",
946
+ "maximises": "maximizes",
947
+ "maximising": "maximizing",
948
+ "meagre": "meager",
949
+ "mechanisation": "mechanization",
950
+ "mechanise": "mechanize",
951
+ "mechanised": "mechanized",
952
+ "mechanises": "mechanizes",
953
+ "mechanising": "mechanizing",
954
+ "mediaeval": "medieval",
955
+ "memorialise": "memorialize",
956
+ "memorialised": "memorialized",
957
+ "memorialises": "memorializes",
958
+ "memorialising": "memorializing",
959
+ "memorise": "memorize",
960
+ "memorised": "memorized",
961
+ "memorises": "memorizes",
962
+ "memorising": "memorizing",
963
+ "mesmerise": "mesmerize",
964
+ "mesmerised": "mesmerized",
965
+ "mesmerises": "mesmerizes",
966
+ "mesmerising": "mesmerizing",
967
+ "metabolise": "metabolize",
968
+ "metabolised": "metabolized",
969
+ "metabolises": "metabolizes",
970
+ "metabolising": "metabolizing",
971
+ "metre": "meter",
972
+ "metres": "meters",
973
+ "mhm": "hmm",
974
+ "micrometre": "micrometer",
975
+ "micrometres": "micrometers",
976
+ "militarise": "militarize",
977
+ "militarised": "militarized",
978
+ "militarises": "militarizes",
979
+ "militarising": "militarizing",
980
+ "milligramme": "milligram",
981
+ "milligrammes": "milligrams",
982
+ "millilitre": "milliliter",
983
+ "millilitres": "milliliters",
984
+ "millimetre": "millimeter",
985
+ "millimetres": "millimeters",
986
+ "miniaturisation": "miniaturization",
987
+ "miniaturise": "miniaturize",
988
+ "miniaturised": "miniaturized",
989
+ "miniaturises": "miniaturizes",
990
+ "miniaturising": "miniaturizing",
991
+ "minibusses": "minibuses",
992
+ "minimise": "minimize",
993
+ "minimised": "minimized",
994
+ "minimises": "minimizes",
995
+ "minimising": "minimizing",
996
+ "misbehaviour": "misbehavior",
997
+ "misdemeanour": "misdemeanor",
998
+ "misdemeanours": "misdemeanors",
999
+ "misspelt": "misspelled",
1000
+ "mitre": "miter",
1001
+ "mitres": "miters",
1002
+ "mm": "hmm",
1003
+ "mmm": "hmm",
1004
+ "mobilisation": "mobilization",
1005
+ "mobilise": "mobilize",
1006
+ "mobilised": "mobilized",
1007
+ "mobilises": "mobilizes",
1008
+ "mobilising": "mobilizing",
1009
+ "modelled": "modeled",
1010
+ "modeller": "modeler",
1011
+ "modellers": "modelers",
1012
+ "modelling": "modeling",
1013
+ "modernise": "modernize",
1014
+ "modernised": "modernized",
1015
+ "modernises": "modernizes",
1016
+ "modernising": "modernizing",
1017
+ "moisturise": "moisturize",
1018
+ "moisturised": "moisturized",
1019
+ "moisturiser": "moisturizer",
1020
+ "moisturisers": "moisturizers",
1021
+ "moisturises": "moisturizes",
1022
+ "moisturising": "moisturizing",
1023
+ "monologue": "monolog",
1024
+ "monologues": "monologs",
1025
+ "monopolisation": "monopolization",
1026
+ "monopolise": "monopolize",
1027
+ "monopolised": "monopolized",
1028
+ "monopolises": "monopolizes",
1029
+ "monopolising": "monopolizing",
1030
+ "moralise": "moralize",
1031
+ "moralised": "moralized",
1032
+ "moralises": "moralizes",
1033
+ "moralising": "moralizing",
1034
+ "motorised": "motorized",
1035
+ "mould": "mold",
1036
+ "moulded": "molded",
1037
+ "moulder": "molder",
1038
+ "mouldered": "moldered",
1039
+ "mouldering": "moldering",
1040
+ "moulders": "molders",
1041
+ "mouldier": "moldier",
1042
+ "mouldiest": "moldiest",
1043
+ "moulding": "molding",
1044
+ "mouldings": "moldings",
1045
+ "moulds": "molds",
1046
+ "mouldy": "moldy",
1047
+ "moult": "molt",
1048
+ "moulted": "molted",
1049
+ "moulting": "molting",
1050
+ "moults": "molts",
1051
+ "moustache": "mustache",
1052
+ "moustached": "mustached",
1053
+ "moustaches": "mustaches",
1054
+ "moustachioed": "mustachioed",
1055
+ "multicoloured": "multicolored",
1056
+ "nationalisation": "nationalization",
1057
+ "nationalisations": "nationalizations",
1058
+ "nationalise": "nationalize",
1059
+ "nationalised": "nationalized",
1060
+ "nationalises": "nationalizes",
1061
+ "nationalising": "nationalizing",
1062
+ "naturalisation": "naturalization",
1063
+ "naturalise": "naturalize",
1064
+ "naturalised": "naturalized",
1065
+ "naturalises": "naturalizes",
1066
+ "naturalising": "naturalizing",
1067
+ "neighbour": "neighbor",
1068
+ "neighbourhood": "neighborhood",
1069
+ "neighbourhoods": "neighborhoods",
1070
+ "neighbouring": "neighboring",
1071
+ "neighbourliness": "neighborliness",
1072
+ "neighbourly": "neighborly",
1073
+ "neighbours": "neighbors",
1074
+ "neutralisation": "neutralization",
1075
+ "neutralise": "neutralize",
1076
+ "neutralised": "neutralized",
1077
+ "neutralises": "neutralizes",
1078
+ "neutralising": "neutralizing",
1079
+ "normalisation": "normalization",
1080
+ "normalise": "normalize",
1081
+ "normalised": "normalized",
1082
+ "normalises": "normalizes",
1083
+ "normalising": "normalizing",
1084
+ "odour": "odor",
1085
+ "odourless": "odorless",
1086
+ "odours": "odors",
1087
+ "oesophagus": "esophagus",
1088
+ "oesophaguses": "esophaguses",
1089
+ "oestrogen": "estrogen",
1090
+ "offence": "offense",
1091
+ "offences": "offenses",
1092
+ "omelette": "omelet",
1093
+ "omelettes": "omelets",
1094
+ "optimise": "optimize",
1095
+ "optimised": "optimized",
1096
+ "optimises": "optimizes",
1097
+ "optimising": "optimizing",
1098
+ "organisation": "organization",
1099
+ "organisational": "organizational",
1100
+ "organisations": "organizations",
1101
+ "organise": "organize",
1102
+ "organised": "organized",
1103
+ "organiser": "organizer",
1104
+ "organisers": "organizers",
1105
+ "organises": "organizes",
1106
+ "organising": "organizing",
1107
+ "orthopaedic": "orthopedic",
1108
+ "orthopaedics": "orthopedics",
1109
+ "ostracise": "ostracize",
1110
+ "ostracised": "ostracized",
1111
+ "ostracises": "ostracizes",
1112
+ "ostracising": "ostracizing",
1113
+ "outmanoeuvre": "outmaneuver",
1114
+ "outmanoeuvred": "outmaneuvered",
1115
+ "outmanoeuvres": "outmaneuvers",
1116
+ "outmanoeuvring": "outmaneuvering",
1117
+ "overemphasise": "overemphasize",
1118
+ "overemphasised": "overemphasized",
1119
+ "overemphasises": "overemphasizes",
1120
+ "overemphasising": "overemphasizing",
1121
+ "oxidisation": "oxidization",
1122
+ "oxidise": "oxidize",
1123
+ "oxidised": "oxidized",
1124
+ "oxidises": "oxidizes",
1125
+ "oxidising": "oxidizing",
1126
+ "paederast": "pederast",
1127
+ "paederasts": "pederasts",
1128
+ "paediatric": "pediatric",
1129
+ "paediatrician": "pediatrician",
1130
+ "paediatricians": "pediatricians",
1131
+ "paediatrics": "pediatrics",
1132
+ "paedophile": "pedophile",
1133
+ "paedophiles": "pedophiles",
1134
+ "paedophilia": "pedophilia",
1135
+ "palaeolithic": "paleolithic",
1136
+ "palaeontologist": "paleontologist",
1137
+ "palaeontologists": "paleontologists",
1138
+ "palaeontology": "paleontology",
1139
+ "panelled": "paneled",
1140
+ "panelling": "paneling",
1141
+ "panellist": "panelist",
1142
+ "panellists": "panelists",
1143
+ "paralyse": "paralyze",
1144
+ "paralysed": "paralyzed",
1145
+ "paralyses": "paralyzes",
1146
+ "paralysing": "paralyzing",
1147
+ "parcelled": "parceled",
1148
+ "parcelling": "parceling",
1149
+ "parlour": "parlor",
1150
+ "parlours": "parlors",
1151
+ "particularise": "particularize",
1152
+ "particularised": "particularized",
1153
+ "particularises": "particularizes",
1154
+ "particularising": "particularizing",
1155
+ "passivisation": "passivization",
1156
+ "passivise": "passivize",
1157
+ "passivised": "passivized",
1158
+ "passivises": "passivizes",
1159
+ "passivising": "passivizing",
1160
+ "pasteurisation": "pasteurization",
1161
+ "pasteurise": "pasteurize",
1162
+ "pasteurised": "pasteurized",
1163
+ "pasteurises": "pasteurizes",
1164
+ "pasteurising": "pasteurizing",
1165
+ "patronise": "patronize",
1166
+ "patronised": "patronized",
1167
+ "patronises": "patronizes",
1168
+ "patronising": "patronizing",
1169
+ "patronisingly": "patronizingly",
1170
+ "pedalled": "pedaled",
1171
+ "pedalling": "pedaling",
1172
+ "pedestrianisation": "pedestrianization",
1173
+ "pedestrianise": "pedestrianize",
1174
+ "pedestrianised": "pedestrianized",
1175
+ "pedestrianises": "pedestrianizes",
1176
+ "pedestrianising": "pedestrianizing",
1177
+ "penalise": "penalize",
1178
+ "penalised": "penalized",
1179
+ "penalises": "penalizes",
1180
+ "penalising": "penalizing",
1181
+ "pencilled": "penciled",
1182
+ "pencilling": "penciling",
1183
+ "personalise": "personalize",
1184
+ "personalised": "personalized",
1185
+ "personalises": "personalizes",
1186
+ "personalising": "personalizing",
1187
+ "pharmacopoeia": "pharmacopeia",
1188
+ "pharmacopoeias": "pharmacopeias",
1189
+ "philosophise": "philosophize",
1190
+ "philosophised": "philosophized",
1191
+ "philosophises": "philosophizes",
1192
+ "philosophising": "philosophizing",
1193
+ "philtre": "filter",
1194
+ "philtres": "filters",
1195
+ "phoney": "phony",
1196
+ "plagiarise": "plagiarize",
1197
+ "plagiarised": "plagiarized",
1198
+ "plagiarises": "plagiarizes",
1199
+ "plagiarising": "plagiarizing",
1200
+ "plough": "plow",
1201
+ "ploughed": "plowed",
1202
+ "ploughing": "plowing",
1203
+ "ploughman": "plowman",
1204
+ "ploughmen": "plowmen",
1205
+ "ploughs": "plows",
1206
+ "ploughshare": "plowshare",
1207
+ "ploughshares": "plowshares",
1208
+ "polarisation": "polarization",
1209
+ "polarise": "polarize",
1210
+ "polarised": "polarized",
1211
+ "polarises": "polarizes",
1212
+ "polarising": "polarizing",
1213
+ "politicisation": "politicization",
1214
+ "politicise": "politicize",
1215
+ "politicised": "politicized",
1216
+ "politicises": "politicizes",
1217
+ "politicising": "politicizing",
1218
+ "popularisation": "popularization",
1219
+ "popularise": "popularize",
1220
+ "popularised": "popularized",
1221
+ "popularises": "popularizes",
1222
+ "popularising": "popularizing",
1223
+ "pouffe": "pouf",
1224
+ "pouffes": "poufs",
1225
+ "practise": "practice",
1226
+ "practised": "practiced",
1227
+ "practises": "practices",
1228
+ "practising": "practicing",
1229
+ "praesidium": "presidium",
1230
+ "praesidiums": "presidiums",
1231
+ "pressurisation": "pressurization",
1232
+ "pressurise": "pressurize",
1233
+ "pressurised": "pressurized",
1234
+ "pressurises": "pressurizes",
1235
+ "pressurising": "pressurizing",
1236
+ "pretence": "pretense",
1237
+ "pretences": "pretenses",
1238
+ "primaeval": "primeval",
1239
+ "prioritisation": "prioritization",
1240
+ "prioritise": "prioritize",
1241
+ "prioritised": "prioritized",
1242
+ "prioritises": "prioritizes",
1243
+ "prioritising": "prioritizing",
1244
+ "privatisation": "privatization",
1245
+ "privatisations": "privatizations",
1246
+ "privatise": "privatize",
1247
+ "privatised": "privatized",
1248
+ "privatises": "privatizes",
1249
+ "privatising": "privatizing",
1250
+ "professionalisation": "professionalization",
1251
+ "professionalise": "professionalize",
1252
+ "professionalised": "professionalized",
1253
+ "professionalises": "professionalizes",
1254
+ "professionalising": "professionalizing",
1255
+ "programme": "program",
1256
+ "programmes": "programs",
1257
+ "prologue": "prolog",
1258
+ "prologues": "prologs",
1259
+ "propagandise": "propagandize",
1260
+ "propagandised": "propagandized",
1261
+ "propagandises": "propagandizes",
1262
+ "propagandising": "propagandizing",
1263
+ "proselytise": "proselytize",
1264
+ "proselytised": "proselytized",
1265
+ "proselytiser": "proselytizer",
1266
+ "proselytisers": "proselytizers",
1267
+ "proselytises": "proselytizes",
1268
+ "proselytising": "proselytizing",
1269
+ "psychoanalyse": "psychoanalyze",
1270
+ "psychoanalysed": "psychoanalyzed",
1271
+ "psychoanalyses": "psychoanalyzes",
1272
+ "psychoanalysing": "psychoanalyzing",
1273
+ "publicise": "publicize",
1274
+ "publicised": "publicized",
1275
+ "publicises": "publicizes",
1276
+ "publicising": "publicizing",
1277
+ "pulverisation": "pulverization",
1278
+ "pulverise": "pulverize",
1279
+ "pulverised": "pulverized",
1280
+ "pulverises": "pulverizes",
1281
+ "pulverising": "pulverizing",
1282
+ "pummelled": "pummel",
1283
+ "pummelling": "pummeled",
1284
+ "pyjama": "pajama",
1285
+ "pyjamas": "pajamas",
1286
+ "pzazz": "pizzazz",
1287
+ "quarrelled": "quarreled",
1288
+ "quarrelling": "quarreling",
1289
+ "radicalise": "radicalize",
1290
+ "radicalised": "radicalized",
1291
+ "radicalises": "radicalizes",
1292
+ "radicalising": "radicalizing",
1293
+ "rancour": "rancor",
1294
+ "randomise": "randomize",
1295
+ "randomised": "randomized",
1296
+ "randomises": "randomizes",
1297
+ "randomising": "randomizing",
1298
+ "rationalisation": "rationalization",
1299
+ "rationalisations": "rationalizations",
1300
+ "rationalise": "rationalize",
1301
+ "rationalised": "rationalized",
1302
+ "rationalises": "rationalizes",
1303
+ "rationalising": "rationalizing",
1304
+ "ravelled": "raveled",
1305
+ "ravelling": "raveling",
1306
+ "realisable": "realizable",
1307
+ "realisation": "realization",
1308
+ "realisations": "realizations",
1309
+ "realise": "realize",
1310
+ "realised": "realized",
1311
+ "realises": "realizes",
1312
+ "realising": "realizing",
1313
+ "recognisable": "recognizable",
1314
+ "recognisably": "recognizably",
1315
+ "recognisance": "recognizance",
1316
+ "recognise": "recognize",
1317
+ "recognised": "recognized",
1318
+ "recognises": "recognizes",
1319
+ "recognising": "recognizing",
1320
+ "reconnoitre": "reconnoiter",
1321
+ "reconnoitred": "reconnoitered",
1322
+ "reconnoitres": "reconnoiters",
1323
+ "reconnoitring": "reconnoitering",
1324
+ "refuelled": "refueled",
1325
+ "refuelling": "refueling",
1326
+ "regularisation": "regularization",
1327
+ "regularise": "regularize",
1328
+ "regularised": "regularized",
1329
+ "regularises": "regularizes",
1330
+ "regularising": "regularizing",
1331
+ "remodelled": "remodeled",
1332
+ "remodelling": "remodeling",
1333
+ "remould": "remold",
1334
+ "remoulded": "remolded",
1335
+ "remoulding": "remolding",
1336
+ "remoulds": "remolds",
1337
+ "reorganisation": "reorganization",
1338
+ "reorganisations": "reorganizations",
1339
+ "reorganise": "reorganize",
1340
+ "reorganised": "reorganized",
1341
+ "reorganises": "reorganizes",
1342
+ "reorganising": "reorganizing",
1343
+ "revelled": "reveled",
1344
+ "reveller": "reveler",
1345
+ "revellers": "revelers",
1346
+ "revelling": "reveling",
1347
+ "revitalise": "revitalize",
1348
+ "revitalised": "revitalized",
1349
+ "revitalises": "revitalizes",
1350
+ "revitalising": "revitalizing",
1351
+ "revolutionise": "revolutionize",
1352
+ "revolutionised": "revolutionized",
1353
+ "revolutionises": "revolutionizes",
1354
+ "revolutionising": "revolutionizing",
1355
+ "rhapsodise": "rhapsodize",
1356
+ "rhapsodised": "rhapsodized",
1357
+ "rhapsodises": "rhapsodizes",
1358
+ "rhapsodising": "rhapsodizing",
1359
+ "rigour": "rigor",
1360
+ "rigours": "rigors",
1361
+ "ritualised": "ritualized",
1362
+ "rivalled": "rivaled",
1363
+ "rivalling": "rivaling",
1364
+ "romanticise": "romanticize",
1365
+ "romanticised": "romanticized",
1366
+ "romanticises": "romanticizes",
1367
+ "romanticising": "romanticizing",
1368
+ "rumour": "rumor",
1369
+ "rumoured": "rumored",
1370
+ "rumours": "rumors",
1371
+ "sabre": "saber",
1372
+ "sabres": "sabers",
1373
+ "saltpetre": "saltpeter",
1374
+ "sanitise": "sanitize",
1375
+ "sanitised": "sanitized",
1376
+ "sanitises": "sanitizes",
1377
+ "sanitising": "sanitizing",
1378
+ "satirise": "satirize",
1379
+ "satirised": "satirized",
1380
+ "satirises": "satirizes",
1381
+ "satirising": "satirizing",
1382
+ "saviour": "savior",
1383
+ "saviours": "saviors",
1384
+ "savour": "savor",
1385
+ "savoured": "savored",
1386
+ "savouries": "savories",
1387
+ "savouring": "savoring",
1388
+ "savours": "savors",
1389
+ "savoury": "savory",
1390
+ "scandalise": "scandalize",
1391
+ "scandalised": "scandalized",
1392
+ "scandalises": "scandalizes",
1393
+ "scandalising": "scandalizing",
1394
+ "sceptic": "skeptic",
1395
+ "sceptical": "skeptical",
1396
+ "sceptically": "skeptically",
1397
+ "scepticism": "skepticism",
1398
+ "sceptics": "skeptics",
1399
+ "sceptre": "scepter",
1400
+ "sceptres": "scepters",
1401
+ "scrutinise": "scrutinize",
1402
+ "scrutinised": "scrutinized",
1403
+ "scrutinises": "scrutinizes",
1404
+ "scrutinising": "scrutinizing",
1405
+ "secularisation": "secularization",
1406
+ "secularise": "secularize",
1407
+ "secularised": "secularized",
1408
+ "secularises": "secularizes",
1409
+ "secularising": "secularizing",
1410
+ "sensationalise": "sensationalize",
1411
+ "sensationalised": "sensationalized",
1412
+ "sensationalises": "sensationalizes",
1413
+ "sensationalising": "sensationalizing",
1414
+ "sensitise": "sensitize",
1415
+ "sensitised": "sensitized",
1416
+ "sensitises": "sensitizes",
1417
+ "sensitising": "sensitizing",
1418
+ "sentimentalise": "sentimentalize",
1419
+ "sentimentalised": "sentimentalized",
1420
+ "sentimentalises": "sentimentalizes",
1421
+ "sentimentalising": "sentimentalizing",
1422
+ "sepulchre": "sepulcher",
1423
+ "sepulchres": "sepulchers",
1424
+ "serialisation": "serialization",
1425
+ "serialisations": "serializations",
1426
+ "serialise": "serialize",
1427
+ "serialised": "serialized",
1428
+ "serialises": "serializes",
1429
+ "serialising": "serializing",
1430
+ "sermonise": "sermonize",
1431
+ "sermonised": "sermonized",
1432
+ "sermonises": "sermonizes",
1433
+ "sermonising": "sermonizing",
1434
+ "sheikh": "sheik",
1435
+ "shovelled": "shoveled",
1436
+ "shovelling": "shoveling",
1437
+ "shrivelled": "shriveled",
1438
+ "shrivelling": "shriveling",
1439
+ "signalise": "signalize",
1440
+ "signalised": "signalized",
1441
+ "signalises": "signalizes",
1442
+ "signalising": "signalizing",
1443
+ "signalled": "signaled",
1444
+ "signalling": "signaling",
1445
+ "smoulder": "smolder",
1446
+ "smouldered": "smoldered",
1447
+ "smouldering": "smoldering",
1448
+ "smoulders": "smolders",
1449
+ "snivelled": "sniveled",
1450
+ "snivelling": "sniveling",
1451
+ "snorkelled": "snorkeled",
1452
+ "snorkelling": "snorkeling",
1453
+ "snowplough": "snowplow",
1454
+ "snowploughs": "snowplow",
1455
+ "socialisation": "socialization",
1456
+ "socialise": "socialize",
1457
+ "socialised": "socialized",
1458
+ "socialises": "socializes",
1459
+ "socialising": "socializing",
1460
+ "sodomise": "sodomize",
1461
+ "sodomised": "sodomized",
1462
+ "sodomises": "sodomizes",
1463
+ "sodomising": "sodomizing",
1464
+ "solemnise": "solemnize",
1465
+ "solemnised": "solemnized",
1466
+ "solemnises": "solemnizes",
1467
+ "solemnising": "solemnizing",
1468
+ "sombre": "somber",
1469
+ "specialisation": "specialization",
1470
+ "specialisations": "specializations",
1471
+ "specialise": "specialize",
1472
+ "specialised": "specialized",
1473
+ "specialises": "specializes",
1474
+ "specialising": "specializing",
1475
+ "spectre": "specter",
1476
+ "spectres": "specters",
1477
+ "spiralled": "spiraled",
1478
+ "spiralling": "spiraling",
1479
+ "splendour": "splendor",
1480
+ "splendours": "splendors",
1481
+ "squirrelled": "squirreled",
1482
+ "squirrelling": "squirreling",
1483
+ "stabilisation": "stabilization",
1484
+ "stabilise": "stabilize",
1485
+ "stabilised": "stabilized",
1486
+ "stabiliser": "stabilizer",
1487
+ "stabilisers": "stabilizers",
1488
+ "stabilises": "stabilizes",
1489
+ "stabilising": "stabilizing",
1490
+ "standardisation": "standardization",
1491
+ "standardise": "standardize",
1492
+ "standardised": "standardized",
1493
+ "standardises": "standardizes",
1494
+ "standardising": "standardizing",
1495
+ "stencilled": "stenciled",
1496
+ "stencilling": "stenciling",
1497
+ "sterilisation": "sterilization",
1498
+ "sterilisations": "sterilizations",
1499
+ "sterilise": "sterilize",
1500
+ "sterilised": "sterilized",
1501
+ "steriliser": "sterilizer",
1502
+ "sterilisers": "sterilizers",
1503
+ "sterilises": "sterilizes",
1504
+ "sterilising": "sterilizing",
1505
+ "stigmatisation": "stigmatization",
1506
+ "stigmatise": "stigmatize",
1507
+ "stigmatised": "stigmatized",
1508
+ "stigmatises": "stigmatizes",
1509
+ "stigmatising": "stigmatizing",
1510
+ "storey": "story",
1511
+ "storeys": "stories",
1512
+ "subsidisation": "subsidization",
1513
+ "subsidise": "subsidize",
1514
+ "subsidised": "subsidized",
1515
+ "subsidiser": "subsidizer",
1516
+ "subsidisers": "subsidizers",
1517
+ "subsidises": "subsidizes",
1518
+ "subsidising": "subsidizing",
1519
+ "succour": "succor",
1520
+ "succoured": "succored",
1521
+ "succouring": "succoring",
1522
+ "succours": "succors",
1523
+ "sulphate": "sulfate",
1524
+ "sulphates": "sulfates",
1525
+ "sulphide": "sulfide",
1526
+ "sulphides": "sulfides",
1527
+ "sulphur": "sulfur",
1528
+ "sulphurous": "sulfurous",
1529
+ "summarise": "summarize",
1530
+ "summarised": "summarized",
1531
+ "summarises": "summarizes",
1532
+ "summarising": "summarizing",
1533
+ "swivelled": "swiveled",
1534
+ "swivelling": "swiveling",
1535
+ "symbolise": "symbolize",
1536
+ "symbolised": "symbolized",
1537
+ "symbolises": "symbolizes",
1538
+ "symbolising": "symbolizing",
1539
+ "sympathise": "sympathize",
1540
+ "sympathised": "sympathized",
1541
+ "sympathiser": "sympathizer",
1542
+ "sympathisers": "sympathizers",
1543
+ "sympathises": "sympathizes",
1544
+ "sympathising": "sympathizing",
1545
+ "synchronisation": "synchronization",
1546
+ "synchronise": "synchronize",
1547
+ "synchronised": "synchronized",
1548
+ "synchronises": "synchronizes",
1549
+ "synchronising": "synchronizing",
1550
+ "synthesise": "synthesize",
1551
+ "synthesised": "synthesized",
1552
+ "synthesiser": "synthesizer",
1553
+ "synthesisers": "synthesizers",
1554
+ "synthesises": "synthesizes",
1555
+ "synthesising": "synthesizing",
1556
+ "syphon": "siphon",
1557
+ "syphoned": "siphoned",
1558
+ "syphoning": "siphoning",
1559
+ "syphons": "siphons",
1560
+ "systematisation": "systematization",
1561
+ "systematise": "systematize",
1562
+ "systematised": "systematized",
1563
+ "systematises": "systematizes",
1564
+ "systematising": "systematizing",
1565
+ "tantalise": "tantalize",
1566
+ "tantalised": "tantalized",
1567
+ "tantalises": "tantalizes",
1568
+ "tantalising": "tantalizing",
1569
+ "tantalisingly": "tantalizingly",
1570
+ "tasselled": "tasseled",
1571
+ "technicolour": "technicolor",
1572
+ "temporise": "temporize",
1573
+ "temporised": "temporized",
1574
+ "temporises": "temporizes",
1575
+ "temporising": "temporizing",
1576
+ "tenderise": "tenderize",
1577
+ "tenderised": "tenderized",
1578
+ "tenderises": "tenderizes",
1579
+ "tenderising": "tenderizing",
1580
+ "terrorise": "terrorize",
1581
+ "terrorised": "terrorized",
1582
+ "terrorises": "terrorizes",
1583
+ "terrorising": "terrorizing",
1584
+ "theatre": "theater",
1585
+ "theatregoer": "theatergoer",
1586
+ "theatregoers": "theatergoers",
1587
+ "theatres": "theaters",
1588
+ "theorise": "theorize",
1589
+ "theorised": "theorized",
1590
+ "theorises": "theorizes",
1591
+ "theorising": "theorizing",
1592
+ "tonne": "ton",
1593
+ "tonnes": "tons",
1594
+ "towelled": "toweled",
1595
+ "towelling": "toweling",
1596
+ "toxaemia": "toxemia",
1597
+ "tranquillise": "tranquilize",
1598
+ "tranquillised": "tranquilized",
1599
+ "tranquilliser": "tranquilizer",
1600
+ "tranquillisers": "tranquilizers",
1601
+ "tranquillises": "tranquilizes",
1602
+ "tranquillising": "tranquilizing",
1603
+ "tranquillity": "tranquility",
1604
+ "tranquillize": "tranquilize",
1605
+ "tranquillized": "tranquilized",
1606
+ "tranquillizer": "tranquilizer",
1607
+ "tranquillizers": "tranquilizers",
1608
+ "tranquillizes": "tranquilizes",
1609
+ "tranquillizing": "tranquilizing",
1610
+ "tranquilly": "tranquility",
1611
+ "transistorised": "transistorized",
1612
+ "traumatise": "traumatize",
1613
+ "traumatised": "traumatized",
1614
+ "traumatises": "traumatizes",
1615
+ "traumatising": "traumatizing",
1616
+ "travelled": "traveled",
1617
+ "traveller": "traveler",
1618
+ "travellers": "travelers",
1619
+ "travelling": "traveling",
1620
+ "travelog": "travelogue",
1621
+ "travelogs": "travelogues",
1622
+ "trialled": "trialed",
1623
+ "trialling": "trialing",
1624
+ "tricolour": "tricolor",
1625
+ "tricolours": "tricolors",
1626
+ "trivialise": "trivialize",
1627
+ "trivialised": "trivialized",
1628
+ "trivialises": "trivializes",
1629
+ "trivialising": "trivializing",
1630
+ "tumour": "tumor",
1631
+ "tumours": "tumors",
1632
+ "tunnelled": "tunneled",
1633
+ "tunnelling": "tunneling",
1634
+ "tyrannise": "tyrannize",
1635
+ "tyrannised": "tyrannized",
1636
+ "tyrannises": "tyrannizes",
1637
+ "tyrannising": "tyrannizing",
1638
+ "tyre": "tire",
1639
+ "tyres": "tires",
1640
+ "unauthorised": "unauthorized",
1641
+ "uncivilised": "uncivilized",
1642
+ "underutilised": "underutilized",
1643
+ "unequalled": "unequaled",
1644
+ "unfavourable": "unfavorable",
1645
+ "unfavourably": "unfavorably",
1646
+ "unionisation": "unionization",
1647
+ "unionise": "unionize",
1648
+ "unionised": "unionized",
1649
+ "unionises": "unionizes",
1650
+ "unionising": "unionizing",
1651
+ "unorganised": "unorganized",
1652
+ "unravelled": "unraveled",
1653
+ "unravelling": "unraveling",
1654
+ "unrecognisable": "unrecognizable",
1655
+ "unrecognised": "unrecognized",
1656
+ "unrivalled": "unrivaled",
1657
+ "unsavoury": "unsavory",
1658
+ "untrammelled": "untrammeled",
1659
+ "urbanisation": "urbanization",
1660
+ "urbanise": "urbanize",
1661
+ "urbanised": "urbanized",
1662
+ "urbanises": "urbanizes",
1663
+ "urbanising": "urbanizing",
1664
+ "utilisable": "utilizable",
1665
+ "utilisation": "utilization",
1666
+ "utilise": "utilize",
1667
+ "utilised": "utilized",
1668
+ "utilises": "utilizes",
1669
+ "utilising": "utilizing",
1670
+ "valour": "valor",
1671
+ "vandalise": "vandalize",
1672
+ "vandalised": "vandalized",
1673
+ "vandalises": "vandalizes",
1674
+ "vandalising": "vandalizing",
1675
+ "vaporisation": "vaporization",
1676
+ "vaporise": "vaporize",
1677
+ "vaporised": "vaporized",
1678
+ "vaporises": "vaporizes",
1679
+ "vaporising": "vaporizing",
1680
+ "vapour": "vapor",
1681
+ "vapours": "vapors",
1682
+ "verbalise": "verbalize",
1683
+ "verbalised": "verbalized",
1684
+ "verbalises": "verbalizes",
1685
+ "verbalising": "verbalizing",
1686
+ "victimisation": "victimization",
1687
+ "victimise": "victimize",
1688
+ "victimised": "victimized",
1689
+ "victimises": "victimizes",
1690
+ "victimising": "victimizing",
1691
+ "videodisc": "videodisk",
1692
+ "videodiscs": "videodisks",
1693
+ "vigour": "vigor",
1694
+ "visualisation": "visualization",
1695
+ "visualisations": "visualizations",
1696
+ "visualise": "visualize",
1697
+ "visualised": "visualized",
1698
+ "visualises": "visualizes",
1699
+ "visualising": "visualizing",
1700
+ "vocalisation": "vocalization",
1701
+ "vocalisations": "vocalizations",
1702
+ "vocalise": "vocalize",
1703
+ "vocalised": "vocalized",
1704
+ "vocalises": "vocalizes",
1705
+ "vocalising": "vocalizing",
1706
+ "vulcanised": "vulcanized",
1707
+ "vulgarisation": "vulgarization",
1708
+ "vulgarise": "vulgarize",
1709
+ "vulgarised": "vulgarized",
1710
+ "vulgarises": "vulgarizes",
1711
+ "vulgarising": "vulgarizing",
1712
+ "waggon": "wagon",
1713
+ "waggons": "wagons",
1714
+ "watercolour": "watercolor",
1715
+ "watercolours": "watercolors",
1716
+ "weaselled": "weaseled",
1717
+ "weaselling": "weaseling",
1718
+ "westernisation": "westernization",
1719
+ "westernise": "westernize",
1720
+ "westernised": "westernized",
1721
+ "westernises": "westernizes",
1722
+ "westernising": "westernizing",
1723
+ "womanise": "womanize",
1724
+ "womanised": "womanized",
1725
+ "womaniser": "womanizer",
1726
+ "womanisers": "womanizers",
1727
+ "womanises": "womanizes",
1728
+ "womanising": "womanizing",
1729
+ "woollen": "woolen",
1730
+ "woollens": "woolens",
1731
+ "woollies": "woolies",
1732
+ "woolly": "wooly",
1733
+ "worshipped": "worshiped",
1734
+ "worshipper": "worshiper",
1735
+ "worshipping": "worshiping",
1736
+ "yodelled": "yodeled",
1737
+ "yodelling": "yodeling",
1738
+ "yoghourt": "yogurt",
1739
+ "yoghourts": "yogurts",
1740
+ "yoghurt": "yogurt",
1741
+ "yoghurts": "yogurts"
1742
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "chunk_length": 30,
3
+ "feature_extractor_type": "WhisperFeatureExtractor",
4
+ "feature_size": 80,
5
+ "hop_length": 160,
6
+ "n_fft": 400,
7
+ "n_samples": 480000,
8
+ "nb_max_frames": 3000,
9
+ "padding_side": "right",
10
+ "padding_value": 0.0,
11
+ "processor_class": "WhisperProcessor",
12
+ "return_attention_mask": false,
13
+ "sampling_rate": 16000
14
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:74128f2aec307840af29289b53b1cc765314f43c18b4d5fff98859e2ae4bfa2d
3
+ size 6173647530
special_tokens_map.json ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|endoftext|>",
4
+ "<|startoftranscript|>",
5
+ "<|en|>",
6
+ "<|zh|>",
7
+ "<|de|>",
8
+ "<|es|>",
9
+ "<|ru|>",
10
+ "<|ko|>",
11
+ "<|fr|>",
12
+ "<|ja|>",
13
+ "<|pt|>",
14
+ "<|tr|>",
15
+ "<|pl|>",
16
+ "<|ca|>",
17
+ "<|nl|>",
18
+ "<|ar|>",
19
+ "<|sv|>",
20
+ "<|it|>",
21
+ "<|id|>",
22
+ "<|hi|>",
23
+ "<|fi|>",
24
+ "<|vi|>",
25
+ "<|he|>",
26
+ "<|uk|>",
27
+ "<|el|>",
28
+ "<|ms|>",
29
+ "<|cs|>",
30
+ "<|ro|>",
31
+ "<|da|>",
32
+ "<|hu|>",
33
+ "<|ta|>",
34
+ "<|no|>",
35
+ "<|th|>",
36
+ "<|ur|>",
37
+ "<|hr|>",
38
+ "<|bg|>",
39
+ "<|lt|>",
40
+ "<|la|>",
41
+ "<|mi|>",
42
+ "<|ml|>",
43
+ "<|cy|>",
44
+ "<|sk|>",
45
+ "<|te|>",
46
+ "<|fa|>",
47
+ "<|lv|>",
48
+ "<|bn|>",
49
+ "<|sr|>",
50
+ "<|az|>",
51
+ "<|sl|>",
52
+ "<|kn|>",
53
+ "<|et|>",
54
+ "<|mk|>",
55
+ "<|br|>",
56
+ "<|eu|>",
57
+ "<|is|>",
58
+ "<|hy|>",
59
+ "<|ne|>",
60
+ "<|mn|>",
61
+ "<|bs|>",
62
+ "<|kk|>",
63
+ "<|sq|>",
64
+ "<|sw|>",
65
+ "<|gl|>",
66
+ "<|mr|>",
67
+ "<|pa|>",
68
+ "<|si|>",
69
+ "<|km|>",
70
+ "<|sn|>",
71
+ "<|yo|>",
72
+ "<|so|>",
73
+ "<|af|>",
74
+ "<|oc|>",
75
+ "<|ka|>",
76
+ "<|be|>",
77
+ "<|tg|>",
78
+ "<|sd|>",
79
+ "<|gu|>",
80
+ "<|am|>",
81
+ "<|yi|>",
82
+ "<|lo|>",
83
+ "<|uz|>",
84
+ "<|fo|>",
85
+ "<|ht|>",
86
+ "<|ps|>",
87
+ "<|tk|>",
88
+ "<|nn|>",
89
+ "<|mt|>",
90
+ "<|sa|>",
91
+ "<|lb|>",
92
+ "<|my|>",
93
+ "<|bo|>",
94
+ "<|tl|>",
95
+ "<|mg|>",
96
+ "<|as|>",
97
+ "<|tt|>",
98
+ "<|haw|>",
99
+ "<|ln|>",
100
+ "<|ha|>",
101
+ "<|ba|>",
102
+ "<|jw|>",
103
+ "<|su|>",
104
+ "<|translate|>",
105
+ "<|transcribe|>",
106
+ "<|startoflm|>",
107
+ "<|startofprev|>",
108
+ "<|nocaptions|>",
109
+ "<|notimestamps|>"
110
+ ],
111
+ "bos_token": {
112
+ "content": "<|endoftext|>",
113
+ "lstrip": false,
114
+ "normalized": true,
115
+ "rstrip": false,
116
+ "single_word": false
117
+ },
118
+ "eos_token": {
119
+ "content": "<|endoftext|>",
120
+ "lstrip": false,
121
+ "normalized": true,
122
+ "rstrip": false,
123
+ "single_word": false
124
+ },
125
+ "pad_token": "<|endoftext|>",
126
+ "unk_token": {
127
+ "content": "<|endoftext|>",
128
+ "lstrip": false,
129
+ "normalized": true,
130
+ "rstrip": false,
131
+ "single_word": false
132
+ }
133
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "bos_token": {
5
+ "__type": "AddedToken",
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": true,
9
+ "rstrip": false,
10
+ "single_word": false
11
+ },
12
+ "eos_token": {
13
+ "__type": "AddedToken",
14
+ "content": "<|endoftext|>",
15
+ "lstrip": false,
16
+ "normalized": true,
17
+ "rstrip": false,
18
+ "single_word": false
19
+ },
20
+ "errors": "replace",
21
+ "model_max_length": 1024,
22
+ "pad_token": null,
23
+ "processor_class": "WhisperProcessor",
24
+ "return_attention_mask": false,
25
+ "special_tokens_map_file": null,
26
+ "tokenizer_class": "WhisperTokenizer",
27
+ "unk_token": {
28
+ "__type": "AddedToken",
29
+ "content": "<|endoftext|>",
30
+ "lstrip": false,
31
+ "normalized": true,
32
+ "rstrip": false,
33
+ "single_word": false
34
+ }
35
+ }
train_results.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 1.0,
3
+ "train_loss": 0.051935346220949447,
4
+ "train_runtime": 527672.5015,
5
+ "train_samples_per_second": 1.899,
6
+ "train_steps_per_second": 0.119
7
+ }
trainer_state.json ADDED
@@ -0,0 +1,4237 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 7.136237256719184,
3
+ "best_model_checkpoint": "CHECKPOINTS/checkpoint-59160",
4
+ "epoch": 1.0000319284802044,
5
+ "global_step": 62640,
6
+ "is_hyper_param_search": false,
7
+ "is_local_process_zero": true,
8
+ "is_world_process_zero": true,
9
+ "log_history": [
10
+ {
11
+ "epoch": 0.0,
12
+ "learning_rate": 1.9310344827586207e-06,
13
+ "loss": 2.0236,
14
+ "step": 116
15
+ },
16
+ {
17
+ "epoch": 0.0,
18
+ "learning_rate": 3.931034482758621e-06,
19
+ "loss": 0.5343,
20
+ "step": 232
21
+ },
22
+ {
23
+ "epoch": 0.01,
24
+ "learning_rate": 5.9310344827586205e-06,
25
+ "loss": 0.4203,
26
+ "step": 348
27
+ },
28
+ {
29
+ "epoch": 0.01,
30
+ "learning_rate": 7.93103448275862e-06,
31
+ "loss": 0.3676,
32
+ "step": 464
33
+ },
34
+ {
35
+ "epoch": 0.01,
36
+ "learning_rate": 9.931034482758622e-06,
37
+ "loss": 0.3277,
38
+ "step": 580
39
+ },
40
+ {
41
+ "epoch": 0.01,
42
+ "eval_loss": 0.42336705327033997,
43
+ "eval_runtime": 820.7641,
44
+ "eval_samples_per_second": 2.088,
45
+ "eval_steps_per_second": 0.262,
46
+ "eval_wer": 58.4491813407476,
47
+ "step": 580
48
+ },
49
+ {
50
+ "epoch": 0.01,
51
+ "learning_rate": 9.981952948759265e-06,
52
+ "loss": 0.3055,
53
+ "step": 696
54
+ },
55
+ {
56
+ "epoch": 0.01,
57
+ "learning_rate": 9.963261359974218e-06,
58
+ "loss": 0.2803,
59
+ "step": 812
60
+ },
61
+ {
62
+ "epoch": 0.01,
63
+ "learning_rate": 9.944569771189173e-06,
64
+ "loss": 0.2546,
65
+ "step": 928
66
+ },
67
+ {
68
+ "epoch": 0.02,
69
+ "learning_rate": 9.925878182404126e-06,
70
+ "loss": 0.2365,
71
+ "step": 1044
72
+ },
73
+ {
74
+ "epoch": 0.02,
75
+ "learning_rate": 9.90718659361908e-06,
76
+ "loss": 0.2238,
77
+ "step": 1160
78
+ },
79
+ {
80
+ "epoch": 0.02,
81
+ "eval_loss": 0.3460919260978699,
82
+ "eval_runtime": 799.3208,
83
+ "eval_samples_per_second": 2.144,
84
+ "eval_steps_per_second": 0.269,
85
+ "eval_wer": 42.02450828956853,
86
+ "step": 1160
87
+ },
88
+ {
89
+ "epoch": 0.02,
90
+ "learning_rate": 9.888495004834032e-06,
91
+ "loss": 0.2156,
92
+ "step": 1276
93
+ },
94
+ {
95
+ "epoch": 0.02,
96
+ "learning_rate": 9.869803416048985e-06,
97
+ "loss": 0.2058,
98
+ "step": 1392
99
+ },
100
+ {
101
+ "epoch": 0.02,
102
+ "learning_rate": 9.85111182726394e-06,
103
+ "loss": 0.1976,
104
+ "step": 1508
105
+ },
106
+ {
107
+ "epoch": 0.03,
108
+ "learning_rate": 9.832420238478893e-06,
109
+ "loss": 0.1788,
110
+ "step": 1624
111
+ },
112
+ {
113
+ "epoch": 0.03,
114
+ "learning_rate": 9.813728649693846e-06,
115
+ "loss": 0.1861,
116
+ "step": 1740
117
+ },
118
+ {
119
+ "epoch": 0.03,
120
+ "eval_loss": 0.2978155016899109,
121
+ "eval_runtime": 789.7073,
122
+ "eval_samples_per_second": 2.17,
123
+ "eval_steps_per_second": 0.272,
124
+ "eval_wer": 33.662856554422824,
125
+ "step": 1740
126
+ },
127
+ {
128
+ "epoch": 0.03,
129
+ "learning_rate": 9.795037060908799e-06,
130
+ "loss": 0.174,
131
+ "step": 1856
132
+ },
133
+ {
134
+ "epoch": 0.03,
135
+ "learning_rate": 9.776345472123752e-06,
136
+ "loss": 0.1739,
137
+ "step": 1972
138
+ },
139
+ {
140
+ "epoch": 0.03,
141
+ "learning_rate": 9.757653883338705e-06,
142
+ "loss": 0.1683,
143
+ "step": 2088
144
+ },
145
+ {
146
+ "epoch": 0.04,
147
+ "learning_rate": 9.738962294553658e-06,
148
+ "loss": 0.1606,
149
+ "step": 2204
150
+ },
151
+ {
152
+ "epoch": 0.04,
153
+ "learning_rate": 9.720270705768611e-06,
154
+ "loss": 0.1572,
155
+ "step": 2320
156
+ },
157
+ {
158
+ "epoch": 0.04,
159
+ "eval_loss": 0.27300673723220825,
160
+ "eval_runtime": 786.9048,
161
+ "eval_samples_per_second": 2.178,
162
+ "eval_steps_per_second": 0.273,
163
+ "eval_wer": 34.06446298012563,
164
+ "step": 2320
165
+ },
166
+ {
167
+ "epoch": 0.04,
168
+ "learning_rate": 9.701579116983564e-06,
169
+ "loss": 0.1498,
170
+ "step": 2436
171
+ },
172
+ {
173
+ "epoch": 0.04,
174
+ "learning_rate": 9.682887528198517e-06,
175
+ "loss": 0.1475,
176
+ "step": 2552
177
+ },
178
+ {
179
+ "epoch": 0.04,
180
+ "learning_rate": 9.664195939413472e-06,
181
+ "loss": 0.1421,
182
+ "step": 2668
183
+ },
184
+ {
185
+ "epoch": 0.04,
186
+ "learning_rate": 9.645504350628425e-06,
187
+ "loss": 0.1416,
188
+ "step": 2784
189
+ },
190
+ {
191
+ "epoch": 0.05,
192
+ "learning_rate": 9.626812761843378e-06,
193
+ "loss": 0.1416,
194
+ "step": 2900
195
+ },
196
+ {
197
+ "epoch": 0.05,
198
+ "eval_loss": 0.2491539865732193,
199
+ "eval_runtime": 790.6324,
200
+ "eval_samples_per_second": 2.168,
201
+ "eval_steps_per_second": 0.272,
202
+ "eval_wer": 33.508392544537124,
203
+ "step": 2900
204
+ },
205
+ {
206
+ "epoch": 0.05,
207
+ "learning_rate": 9.608121173058331e-06,
208
+ "loss": 0.1359,
209
+ "step": 3016
210
+ },
211
+ {
212
+ "epoch": 0.05,
213
+ "learning_rate": 9.589429584273284e-06,
214
+ "loss": 0.1324,
215
+ "step": 3132
216
+ },
217
+ {
218
+ "epoch": 0.05,
219
+ "learning_rate": 9.570737995488239e-06,
220
+ "loss": 0.1388,
221
+ "step": 3248
222
+ },
223
+ {
224
+ "epoch": 0.05,
225
+ "learning_rate": 9.552046406703192e-06,
226
+ "loss": 0.1353,
227
+ "step": 3364
228
+ },
229
+ {
230
+ "epoch": 0.06,
231
+ "learning_rate": 9.533354817918145e-06,
232
+ "loss": 0.1293,
233
+ "step": 3480
234
+ },
235
+ {
236
+ "epoch": 0.06,
237
+ "eval_loss": 0.24061298370361328,
238
+ "eval_runtime": 785.4078,
239
+ "eval_samples_per_second": 2.182,
240
+ "eval_steps_per_second": 0.274,
241
+ "eval_wer": 32.01524044897539,
242
+ "step": 3480
243
+ },
244
+ {
245
+ "epoch": 0.06,
246
+ "learning_rate": 9.514663229133098e-06,
247
+ "loss": 0.1293,
248
+ "step": 3596
249
+ },
250
+ {
251
+ "epoch": 0.06,
252
+ "learning_rate": 9.49597164034805e-06,
253
+ "loss": 0.1234,
254
+ "step": 3712
255
+ },
256
+ {
257
+ "epoch": 0.06,
258
+ "learning_rate": 9.477280051563005e-06,
259
+ "loss": 0.1165,
260
+ "step": 3828
261
+ },
262
+ {
263
+ "epoch": 0.06,
264
+ "learning_rate": 9.458588462777958e-06,
265
+ "loss": 0.1221,
266
+ "step": 3944
267
+ },
268
+ {
269
+ "epoch": 0.06,
270
+ "learning_rate": 9.439896873992912e-06,
271
+ "loss": 0.1212,
272
+ "step": 4060
273
+ },
274
+ {
275
+ "epoch": 0.06,
276
+ "eval_loss": 0.24251143634319305,
277
+ "eval_runtime": 789.4621,
278
+ "eval_samples_per_second": 2.171,
279
+ "eval_steps_per_second": 0.272,
280
+ "eval_wer": 31.366491607455465,
281
+ "step": 4060
282
+ },
283
+ {
284
+ "epoch": 0.07,
285
+ "learning_rate": 9.421205285207865e-06,
286
+ "loss": 0.121,
287
+ "step": 4176
288
+ },
289
+ {
290
+ "epoch": 0.07,
291
+ "learning_rate": 9.402513696422818e-06,
292
+ "loss": 0.1118,
293
+ "step": 4292
294
+ },
295
+ {
296
+ "epoch": 0.07,
297
+ "learning_rate": 9.38382210763777e-06,
298
+ "loss": 0.1093,
299
+ "step": 4408
300
+ },
301
+ {
302
+ "epoch": 0.07,
303
+ "learning_rate": 9.365130518852724e-06,
304
+ "loss": 0.1155,
305
+ "step": 4524
306
+ },
307
+ {
308
+ "epoch": 0.07,
309
+ "learning_rate": 9.346438930067677e-06,
310
+ "loss": 0.1102,
311
+ "step": 4640
312
+ },
313
+ {
314
+ "epoch": 0.07,
315
+ "eval_loss": 0.22580939531326294,
316
+ "eval_runtime": 787.0585,
317
+ "eval_samples_per_second": 2.178,
318
+ "eval_steps_per_second": 0.273,
319
+ "eval_wer": 30.810421171866953,
320
+ "step": 4640
321
+ },
322
+ {
323
+ "epoch": 0.08,
324
+ "learning_rate": 9.32774734128263e-06,
325
+ "loss": 0.1057,
326
+ "step": 4756
327
+ },
328
+ {
329
+ "epoch": 0.08,
330
+ "learning_rate": 9.309055752497583e-06,
331
+ "loss": 0.1053,
332
+ "step": 4872
333
+ },
334
+ {
335
+ "epoch": 0.08,
336
+ "learning_rate": 9.290364163712537e-06,
337
+ "loss": 0.1007,
338
+ "step": 4988
339
+ },
340
+ {
341
+ "epoch": 0.08,
342
+ "learning_rate": 9.27167257492749e-06,
343
+ "loss": 0.1076,
344
+ "step": 5104
345
+ },
346
+ {
347
+ "epoch": 0.08,
348
+ "learning_rate": 9.252980986142443e-06,
349
+ "loss": 0.1058,
350
+ "step": 5220
351
+ },
352
+ {
353
+ "epoch": 0.08,
354
+ "eval_loss": 0.21304036676883698,
355
+ "eval_runtime": 789.6831,
356
+ "eval_samples_per_second": 2.17,
357
+ "eval_steps_per_second": 0.272,
358
+ "eval_wer": 27.66965297085779,
359
+ "step": 5220
360
+ },
361
+ {
362
+ "epoch": 0.09,
363
+ "learning_rate": 9.234289397357396e-06,
364
+ "loss": 0.1027,
365
+ "step": 5336
366
+ },
367
+ {
368
+ "epoch": 0.09,
369
+ "learning_rate": 9.21559780857235e-06,
370
+ "loss": 0.1013,
371
+ "step": 5452
372
+ },
373
+ {
374
+ "epoch": 0.09,
375
+ "learning_rate": 9.196906219787304e-06,
376
+ "loss": 0.1011,
377
+ "step": 5568
378
+ },
379
+ {
380
+ "epoch": 0.09,
381
+ "learning_rate": 9.178214631002257e-06,
382
+ "loss": 0.0987,
383
+ "step": 5684
384
+ },
385
+ {
386
+ "epoch": 0.09,
387
+ "learning_rate": 9.15952304221721e-06,
388
+ "loss": 0.0981,
389
+ "step": 5800
390
+ },
391
+ {
392
+ "epoch": 0.09,
393
+ "eval_loss": 0.20962001383304596,
394
+ "eval_runtime": 788.0578,
395
+ "eval_samples_per_second": 2.175,
396
+ "eval_steps_per_second": 0.273,
397
+ "eval_wer": 30.151374729687987,
398
+ "step": 5800
399
+ },
400
+ {
401
+ "epoch": 0.09,
402
+ "learning_rate": 9.140831453432163e-06,
403
+ "loss": 0.0984,
404
+ "step": 5916
405
+ },
406
+ {
407
+ "epoch": 0.1,
408
+ "learning_rate": 9.122139864647116e-06,
409
+ "loss": 0.0928,
410
+ "step": 6032
411
+ },
412
+ {
413
+ "epoch": 0.1,
414
+ "learning_rate": 9.10344827586207e-06,
415
+ "loss": 0.095,
416
+ "step": 6148
417
+ },
418
+ {
419
+ "epoch": 0.1,
420
+ "learning_rate": 9.084756687077024e-06,
421
+ "loss": 0.0988,
422
+ "step": 6264
423
+ },
424
+ {
425
+ "epoch": 0.1,
426
+ "learning_rate": 9.066065098291977e-06,
427
+ "loss": 0.0987,
428
+ "step": 6380
429
+ },
430
+ {
431
+ "epoch": 0.1,
432
+ "eval_loss": 0.2030366212129593,
433
+ "eval_runtime": 791.7115,
434
+ "eval_samples_per_second": 2.165,
435
+ "eval_steps_per_second": 0.272,
436
+ "eval_wer": 29.23488827103285,
437
+ "step": 6380
438
+ },
439
+ {
440
+ "epoch": 0.1,
441
+ "learning_rate": 9.04737350950693e-06,
442
+ "loss": 0.0889,
443
+ "step": 6496
444
+ },
445
+ {
446
+ "epoch": 0.11,
447
+ "learning_rate": 9.028681920721883e-06,
448
+ "loss": 0.0866,
449
+ "step": 6612
450
+ },
451
+ {
452
+ "epoch": 0.11,
453
+ "learning_rate": 9.009990331936836e-06,
454
+ "loss": 0.088,
455
+ "step": 6728
456
+ },
457
+ {
458
+ "epoch": 0.11,
459
+ "learning_rate": 8.991298743151789e-06,
460
+ "loss": 0.0886,
461
+ "step": 6844
462
+ },
463
+ {
464
+ "epoch": 0.11,
465
+ "learning_rate": 8.972607154366742e-06,
466
+ "loss": 0.0879,
467
+ "step": 6960
468
+ },
469
+ {
470
+ "epoch": 0.11,
471
+ "eval_loss": 0.2006106823682785,
472
+ "eval_runtime": 788.215,
473
+ "eval_samples_per_second": 2.175,
474
+ "eval_steps_per_second": 0.273,
475
+ "eval_wer": 26.814952116156938,
476
+ "step": 6960
477
+ },
478
+ {
479
+ "epoch": 0.11,
480
+ "learning_rate": 8.953915565581695e-06,
481
+ "loss": 0.0865,
482
+ "step": 7076
483
+ },
484
+ {
485
+ "epoch": 0.11,
486
+ "learning_rate": 8.935223976796648e-06,
487
+ "loss": 0.0881,
488
+ "step": 7192
489
+ },
490
+ {
491
+ "epoch": 0.12,
492
+ "learning_rate": 8.916532388011603e-06,
493
+ "loss": 0.0872,
494
+ "step": 7308
495
+ },
496
+ {
497
+ "epoch": 0.12,
498
+ "learning_rate": 8.897840799226556e-06,
499
+ "loss": 0.0848,
500
+ "step": 7424
501
+ },
502
+ {
503
+ "epoch": 0.12,
504
+ "learning_rate": 8.879149210441509e-06,
505
+ "loss": 0.0817,
506
+ "step": 7540
507
+ },
508
+ {
509
+ "epoch": 0.12,
510
+ "eval_loss": 0.20483049750328064,
511
+ "eval_runtime": 791.5631,
512
+ "eval_samples_per_second": 2.165,
513
+ "eval_steps_per_second": 0.272,
514
+ "eval_wer": 28.89506744928432,
515
+ "step": 7540
516
+ },
517
+ {
518
+ "epoch": 0.12,
519
+ "learning_rate": 8.860457621656462e-06,
520
+ "loss": 0.0831,
521
+ "step": 7656
522
+ },
523
+ {
524
+ "epoch": 0.12,
525
+ "learning_rate": 8.841766032871415e-06,
526
+ "loss": 0.0847,
527
+ "step": 7772
528
+ },
529
+ {
530
+ "epoch": 0.13,
531
+ "learning_rate": 8.82307444408637e-06,
532
+ "loss": 0.0864,
533
+ "step": 7888
534
+ },
535
+ {
536
+ "epoch": 0.13,
537
+ "learning_rate": 8.804382855301323e-06,
538
+ "loss": 0.0812,
539
+ "step": 8004
540
+ },
541
+ {
542
+ "epoch": 0.13,
543
+ "learning_rate": 8.785691266516276e-06,
544
+ "loss": 0.0806,
545
+ "step": 8120
546
+ },
547
+ {
548
+ "epoch": 0.13,
549
+ "eval_loss": 0.19857698678970337,
550
+ "eval_runtime": 798.8422,
551
+ "eval_samples_per_second": 2.146,
552
+ "eval_steps_per_second": 0.269,
553
+ "eval_wer": 29.821851508598495,
554
+ "step": 8120
555
+ },
556
+ {
557
+ "epoch": 0.13,
558
+ "learning_rate": 8.767160812117308e-06,
559
+ "loss": 0.0772,
560
+ "step": 8236
561
+ },
562
+ {
563
+ "epoch": 0.13,
564
+ "learning_rate": 8.74846922333226e-06,
565
+ "loss": 0.082,
566
+ "step": 8352
567
+ },
568
+ {
569
+ "epoch": 0.14,
570
+ "learning_rate": 8.729777634547214e-06,
571
+ "loss": 0.0782,
572
+ "step": 8468
573
+ },
574
+ {
575
+ "epoch": 0.14,
576
+ "learning_rate": 8.711086045762167e-06,
577
+ "loss": 0.0797,
578
+ "step": 8584
579
+ },
580
+ {
581
+ "epoch": 0.14,
582
+ "learning_rate": 8.69239445697712e-06,
583
+ "loss": 0.0766,
584
+ "step": 8700
585
+ },
586
+ {
587
+ "epoch": 0.14,
588
+ "eval_loss": 0.19560863077640533,
589
+ "eval_runtime": 782.2802,
590
+ "eval_samples_per_second": 2.191,
591
+ "eval_steps_per_second": 0.275,
592
+ "eval_wer": 28.998043455874782,
593
+ "step": 8700
594
+ },
595
+ {
596
+ "epoch": 0.14,
597
+ "learning_rate": 8.673702868192073e-06,
598
+ "loss": 0.0811,
599
+ "step": 8816
600
+ },
601
+ {
602
+ "epoch": 0.14,
603
+ "learning_rate": 8.655011279407026e-06,
604
+ "loss": 0.0754,
605
+ "step": 8932
606
+ },
607
+ {
608
+ "epoch": 0.14,
609
+ "learning_rate": 8.636319690621979e-06,
610
+ "loss": 0.078,
611
+ "step": 9048
612
+ },
613
+ {
614
+ "epoch": 0.15,
615
+ "learning_rate": 8.617628101836932e-06,
616
+ "loss": 0.0799,
617
+ "step": 9164
618
+ },
619
+ {
620
+ "epoch": 0.15,
621
+ "learning_rate": 8.598936513051886e-06,
622
+ "loss": 0.0758,
623
+ "step": 9280
624
+ },
625
+ {
626
+ "epoch": 0.15,
627
+ "eval_loss": 0.1869840919971466,
628
+ "eval_runtime": 788.0306,
629
+ "eval_samples_per_second": 2.175,
630
+ "eval_steps_per_second": 0.273,
631
+ "eval_wer": 27.031201729996912,
632
+ "step": 9280
633
+ },
634
+ {
635
+ "epoch": 0.15,
636
+ "learning_rate": 8.58024492426684e-06,
637
+ "loss": 0.0687,
638
+ "step": 9396
639
+ },
640
+ {
641
+ "epoch": 0.15,
642
+ "learning_rate": 8.561553335481792e-06,
643
+ "loss": 0.0758,
644
+ "step": 9512
645
+ },
646
+ {
647
+ "epoch": 0.15,
648
+ "learning_rate": 8.542861746696746e-06,
649
+ "loss": 0.0739,
650
+ "step": 9628
651
+ },
652
+ {
653
+ "epoch": 0.16,
654
+ "learning_rate": 8.524170157911699e-06,
655
+ "loss": 0.0735,
656
+ "step": 9744
657
+ },
658
+ {
659
+ "epoch": 0.16,
660
+ "learning_rate": 8.505478569126653e-06,
661
+ "loss": 0.0749,
662
+ "step": 9860
663
+ },
664
+ {
665
+ "epoch": 0.16,
666
+ "eval_loss": 0.18276962637901306,
667
+ "eval_runtime": 787.0753,
668
+ "eval_samples_per_second": 2.178,
669
+ "eval_steps_per_second": 0.273,
670
+ "eval_wer": 29.63649469673566,
671
+ "step": 9860
672
+ },
673
+ {
674
+ "epoch": 0.16,
675
+ "learning_rate": 8.486786980341606e-06,
676
+ "loss": 0.0742,
677
+ "step": 9976
678
+ },
679
+ {
680
+ "epoch": 0.16,
681
+ "learning_rate": 8.46809539155656e-06,
682
+ "loss": 0.0702,
683
+ "step": 10092
684
+ },
685
+ {
686
+ "epoch": 0.16,
687
+ "learning_rate": 8.449403802771512e-06,
688
+ "loss": 0.0693,
689
+ "step": 10208
690
+ },
691
+ {
692
+ "epoch": 0.16,
693
+ "learning_rate": 8.430712213986465e-06,
694
+ "loss": 0.0725,
695
+ "step": 10324
696
+ },
697
+ {
698
+ "epoch": 0.17,
699
+ "learning_rate": 8.412181759587497e-06,
700
+ "loss": 0.0713,
701
+ "step": 10440
702
+ },
703
+ {
704
+ "epoch": 0.17,
705
+ "eval_loss": 0.18643251061439514,
706
+ "eval_runtime": 785.1719,
707
+ "eval_samples_per_second": 2.183,
708
+ "eval_steps_per_second": 0.274,
709
+ "eval_wer": 25.98084646277417,
710
+ "step": 10440
711
+ },
712
+ {
713
+ "epoch": 0.17,
714
+ "learning_rate": 8.39349017080245e-06,
715
+ "loss": 0.0734,
716
+ "step": 10556
717
+ },
718
+ {
719
+ "epoch": 0.17,
720
+ "learning_rate": 8.374798582017403e-06,
721
+ "loss": 0.072,
722
+ "step": 10672
723
+ },
724
+ {
725
+ "epoch": 0.17,
726
+ "learning_rate": 8.356106993232356e-06,
727
+ "loss": 0.0704,
728
+ "step": 10788
729
+ },
730
+ {
731
+ "epoch": 0.17,
732
+ "learning_rate": 8.33741540444731e-06,
733
+ "loss": 0.0684,
734
+ "step": 10904
735
+ },
736
+ {
737
+ "epoch": 0.18,
738
+ "learning_rate": 8.318884950048341e-06,
739
+ "loss": 0.0634,
740
+ "step": 11020
741
+ },
742
+ {
743
+ "epoch": 0.18,
744
+ "eval_loss": 0.18063540756702423,
745
+ "eval_runtime": 785.7442,
746
+ "eval_samples_per_second": 2.181,
747
+ "eval_steps_per_second": 0.274,
748
+ "eval_wer": 26.681083307589333,
749
+ "step": 11020
750
+ },
751
+ {
752
+ "epoch": 0.18,
753
+ "learning_rate": 8.300193361263294e-06,
754
+ "loss": 0.0681,
755
+ "step": 11136
756
+ },
757
+ {
758
+ "epoch": 0.18,
759
+ "learning_rate": 8.281501772478247e-06,
760
+ "loss": 0.0677,
761
+ "step": 11252
762
+ },
763
+ {
764
+ "epoch": 0.18,
765
+ "learning_rate": 8.2628101836932e-06,
766
+ "loss": 0.0701,
767
+ "step": 11368
768
+ },
769
+ {
770
+ "epoch": 0.18,
771
+ "learning_rate": 8.244118594908153e-06,
772
+ "loss": 0.0648,
773
+ "step": 11484
774
+ },
775
+ {
776
+ "epoch": 0.19,
777
+ "learning_rate": 8.225427006123108e-06,
778
+ "loss": 0.0682,
779
+ "step": 11600
780
+ },
781
+ {
782
+ "epoch": 0.19,
783
+ "eval_loss": 0.1769612729549408,
784
+ "eval_runtime": 783.3099,
785
+ "eval_samples_per_second": 2.188,
786
+ "eval_steps_per_second": 0.274,
787
+ "eval_wer": 26.71197610956647,
788
+ "step": 11600
789
+ },
790
+ {
791
+ "epoch": 0.19,
792
+ "learning_rate": 8.20673541733806e-06,
793
+ "loss": 0.065,
794
+ "step": 11716
795
+ },
796
+ {
797
+ "epoch": 0.19,
798
+ "learning_rate": 8.188043828553014e-06,
799
+ "loss": 0.0648,
800
+ "step": 11832
801
+ },
802
+ {
803
+ "epoch": 0.19,
804
+ "learning_rate": 8.169352239767967e-06,
805
+ "loss": 0.0659,
806
+ "step": 11948
807
+ },
808
+ {
809
+ "epoch": 0.19,
810
+ "learning_rate": 8.15066065098292e-06,
811
+ "loss": 0.0672,
812
+ "step": 12064
813
+ },
814
+ {
815
+ "epoch": 0.19,
816
+ "learning_rate": 8.131969062197875e-06,
817
+ "loss": 0.0607,
818
+ "step": 12180
819
+ },
820
+ {
821
+ "epoch": 0.19,
822
+ "eval_loss": 0.1819377839565277,
823
+ "eval_runtime": 783.0783,
824
+ "eval_samples_per_second": 2.189,
825
+ "eval_steps_per_second": 0.275,
826
+ "eval_wer": 26.990011327360726,
827
+ "step": 12180
828
+ },
829
+ {
830
+ "epoch": 0.2,
831
+ "learning_rate": 8.113277473412828e-06,
832
+ "loss": 0.0672,
833
+ "step": 12296
834
+ },
835
+ {
836
+ "epoch": 0.2,
837
+ "learning_rate": 8.09458588462778e-06,
838
+ "loss": 0.0602,
839
+ "step": 12412
840
+ },
841
+ {
842
+ "epoch": 0.2,
843
+ "learning_rate": 8.075894295842734e-06,
844
+ "loss": 0.0638,
845
+ "step": 12528
846
+ },
847
+ {
848
+ "epoch": 0.2,
849
+ "learning_rate": 8.057202707057687e-06,
850
+ "loss": 0.0635,
851
+ "step": 12644
852
+ },
853
+ {
854
+ "epoch": 0.2,
855
+ "learning_rate": 8.038511118272641e-06,
856
+ "loss": 0.0635,
857
+ "step": 12760
858
+ },
859
+ {
860
+ "epoch": 0.2,
861
+ "eval_loss": 0.17338888347148895,
862
+ "eval_runtime": 785.414,
863
+ "eval_samples_per_second": 2.182,
864
+ "eval_steps_per_second": 0.274,
865
+ "eval_wer": 26.578107300998866,
866
+ "step": 12760
867
+ },
868
+ {
869
+ "epoch": 0.21,
870
+ "learning_rate": 8.019819529487594e-06,
871
+ "loss": 0.0656,
872
+ "step": 12876
873
+ },
874
+ {
875
+ "epoch": 0.21,
876
+ "learning_rate": 8.001127940702547e-06,
877
+ "loss": 0.0608,
878
+ "step": 12992
879
+ },
880
+ {
881
+ "epoch": 0.21,
882
+ "learning_rate": 7.9824363519175e-06,
883
+ "loss": 0.0588,
884
+ "step": 13108
885
+ },
886
+ {
887
+ "epoch": 0.21,
888
+ "learning_rate": 7.963744763132453e-06,
889
+ "loss": 0.058,
890
+ "step": 13224
891
+ },
892
+ {
893
+ "epoch": 0.21,
894
+ "learning_rate": 7.945053174347406e-06,
895
+ "loss": 0.061,
896
+ "step": 13340
897
+ },
898
+ {
899
+ "epoch": 0.21,
900
+ "eval_loss": 0.16947728395462036,
901
+ "eval_runtime": 785.6425,
902
+ "eval_samples_per_second": 2.182,
903
+ "eval_steps_per_second": 0.274,
904
+ "eval_wer": 27.6490577695397,
905
+ "step": 13340
906
+ },
907
+ {
908
+ "epoch": 0.21,
909
+ "learning_rate": 7.92636158556236e-06,
910
+ "loss": 0.0579,
911
+ "step": 13456
912
+ },
913
+ {
914
+ "epoch": 0.22,
915
+ "learning_rate": 7.907669996777313e-06,
916
+ "loss": 0.0639,
917
+ "step": 13572
918
+ },
919
+ {
920
+ "epoch": 0.22,
921
+ "learning_rate": 7.888978407992266e-06,
922
+ "loss": 0.0625,
923
+ "step": 13688
924
+ },
925
+ {
926
+ "epoch": 0.22,
927
+ "learning_rate": 7.870286819207219e-06,
928
+ "loss": 0.0611,
929
+ "step": 13804
930
+ },
931
+ {
932
+ "epoch": 0.22,
933
+ "learning_rate": 7.851595230422173e-06,
934
+ "loss": 0.0623,
935
+ "step": 13920
936
+ },
937
+ {
938
+ "epoch": 0.22,
939
+ "eval_loss": 0.16918495297431946,
940
+ "eval_runtime": 781.9171,
941
+ "eval_samples_per_second": 2.192,
942
+ "eval_steps_per_second": 0.275,
943
+ "eval_wer": 27.422510555040674,
944
+ "step": 13920
945
+ },
946
+ {
947
+ "epoch": 0.22,
948
+ "learning_rate": 7.832903641637126e-06,
949
+ "loss": 0.056,
950
+ "step": 14036
951
+ },
952
+ {
953
+ "epoch": 0.23,
954
+ "learning_rate": 7.81421205285208e-06,
955
+ "loss": 0.0563,
956
+ "step": 14152
957
+ },
958
+ {
959
+ "epoch": 0.23,
960
+ "learning_rate": 7.795520464067032e-06,
961
+ "loss": 0.054,
962
+ "step": 14268
963
+ },
964
+ {
965
+ "epoch": 0.23,
966
+ "learning_rate": 7.776828875281985e-06,
967
+ "loss": 0.0536,
968
+ "step": 14384
969
+ },
970
+ {
971
+ "epoch": 0.23,
972
+ "learning_rate": 7.75813728649694e-06,
973
+ "loss": 0.0583,
974
+ "step": 14500
975
+ },
976
+ {
977
+ "epoch": 0.23,
978
+ "eval_loss": 0.17885711789131165,
979
+ "eval_runtime": 782.2797,
980
+ "eval_samples_per_second": 2.191,
981
+ "eval_steps_per_second": 0.275,
982
+ "eval_wer": 27.679950571516837,
983
+ "step": 14500
984
+ },
985
+ {
986
+ "epoch": 0.23,
987
+ "learning_rate": 7.739445697711893e-06,
988
+ "loss": 0.0562,
989
+ "step": 14616
990
+ },
991
+ {
992
+ "epoch": 0.24,
993
+ "learning_rate": 7.720754108926846e-06,
994
+ "loss": 0.0586,
995
+ "step": 14732
996
+ },
997
+ {
998
+ "epoch": 0.24,
999
+ "learning_rate": 7.702062520141799e-06,
1000
+ "loss": 0.056,
1001
+ "step": 14848
1002
+ },
1003
+ {
1004
+ "epoch": 0.24,
1005
+ "learning_rate": 7.683370931356752e-06,
1006
+ "loss": 0.0508,
1007
+ "step": 14964
1008
+ },
1009
+ {
1010
+ "epoch": 0.24,
1011
+ "learning_rate": 7.664679342571705e-06,
1012
+ "loss": 0.0594,
1013
+ "step": 15080
1014
+ },
1015
+ {
1016
+ "epoch": 0.24,
1017
+ "eval_loss": 0.16561517119407654,
1018
+ "eval_runtime": 794.3578,
1019
+ "eval_samples_per_second": 2.158,
1020
+ "eval_steps_per_second": 0.271,
1021
+ "eval_wer": 25.12614560807332,
1022
+ "step": 15080
1023
+ },
1024
+ {
1025
+ "epoch": 0.24,
1026
+ "learning_rate": 7.645987753786658e-06,
1027
+ "loss": 0.0535,
1028
+ "step": 15196
1029
+ },
1030
+ {
1031
+ "epoch": 0.24,
1032
+ "learning_rate": 7.627296165001612e-06,
1033
+ "loss": 0.0554,
1034
+ "step": 15312
1035
+ },
1036
+ {
1037
+ "epoch": 0.25,
1038
+ "learning_rate": 7.608604576216565e-06,
1039
+ "loss": 0.0514,
1040
+ "step": 15428
1041
+ },
1042
+ {
1043
+ "epoch": 0.25,
1044
+ "learning_rate": 7.589912987431518e-06,
1045
+ "loss": 0.0537,
1046
+ "step": 15544
1047
+ },
1048
+ {
1049
+ "epoch": 0.25,
1050
+ "learning_rate": 7.571221398646472e-06,
1051
+ "loss": 0.0548,
1052
+ "step": 15660
1053
+ },
1054
+ {
1055
+ "epoch": 0.25,
1056
+ "eval_loss": 0.1662958413362503,
1057
+ "eval_runtime": 783.8667,
1058
+ "eval_samples_per_second": 2.187,
1059
+ "eval_steps_per_second": 0.274,
1060
+ "eval_wer": 25.90876325816085,
1061
+ "step": 15660
1062
+ },
1063
+ {
1064
+ "epoch": 0.25,
1065
+ "learning_rate": 7.552529809861425e-06,
1066
+ "loss": 0.0562,
1067
+ "step": 15776
1068
+ },
1069
+ {
1070
+ "epoch": 0.25,
1071
+ "learning_rate": 7.533838221076378e-06,
1072
+ "loss": 0.0579,
1073
+ "step": 15892
1074
+ },
1075
+ {
1076
+ "epoch": 0.26,
1077
+ "learning_rate": 7.515146632291331e-06,
1078
+ "loss": 0.0563,
1079
+ "step": 16008
1080
+ },
1081
+ {
1082
+ "epoch": 0.26,
1083
+ "learning_rate": 7.496455043506284e-06,
1084
+ "loss": 0.0508,
1085
+ "step": 16124
1086
+ },
1087
+ {
1088
+ "epoch": 0.26,
1089
+ "learning_rate": 7.477763454721239e-06,
1090
+ "loss": 0.0546,
1091
+ "step": 16240
1092
+ },
1093
+ {
1094
+ "epoch": 0.26,
1095
+ "eval_loss": 0.15785543620586395,
1096
+ "eval_runtime": 781.6726,
1097
+ "eval_samples_per_second": 2.193,
1098
+ "eval_steps_per_second": 0.275,
1099
+ "eval_wer": 26.289774482545567,
1100
+ "step": 16240
1101
+ },
1102
+ {
1103
+ "epoch": 0.26,
1104
+ "learning_rate": 7.459071865936192e-06,
1105
+ "loss": 0.0496,
1106
+ "step": 16356
1107
+ },
1108
+ {
1109
+ "epoch": 0.26,
1110
+ "learning_rate": 7.440380277151145e-06,
1111
+ "loss": 0.0531,
1112
+ "step": 16472
1113
+ },
1114
+ {
1115
+ "epoch": 0.26,
1116
+ "learning_rate": 7.421849822752177e-06,
1117
+ "loss": 0.0523,
1118
+ "step": 16588
1119
+ },
1120
+ {
1121
+ "epoch": 0.27,
1122
+ "learning_rate": 7.40315823396713e-06,
1123
+ "loss": 0.0565,
1124
+ "step": 16704
1125
+ },
1126
+ {
1127
+ "epoch": 0.27,
1128
+ "learning_rate": 7.384466645182083e-06,
1129
+ "loss": 0.0547,
1130
+ "step": 16820
1131
+ },
1132
+ {
1133
+ "epoch": 0.27,
1134
+ "eval_loss": 0.16302894055843353,
1135
+ "eval_runtime": 785.7904,
1136
+ "eval_samples_per_second": 2.181,
1137
+ "eval_steps_per_second": 0.274,
1138
+ "eval_wer": 27.020904129337865,
1139
+ "step": 16820
1140
+ },
1141
+ {
1142
+ "epoch": 0.27,
1143
+ "learning_rate": 7.365775056397036e-06,
1144
+ "loss": 0.0532,
1145
+ "step": 16936
1146
+ },
1147
+ {
1148
+ "epoch": 0.27,
1149
+ "learning_rate": 7.3470834676119895e-06,
1150
+ "loss": 0.0557,
1151
+ "step": 17052
1152
+ },
1153
+ {
1154
+ "epoch": 0.27,
1155
+ "learning_rate": 7.3283918788269426e-06,
1156
+ "loss": 0.0529,
1157
+ "step": 17168
1158
+ },
1159
+ {
1160
+ "epoch": 0.28,
1161
+ "learning_rate": 7.309700290041896e-06,
1162
+ "loss": 0.0516,
1163
+ "step": 17284
1164
+ },
1165
+ {
1166
+ "epoch": 0.28,
1167
+ "learning_rate": 7.291008701256849e-06,
1168
+ "loss": 0.0543,
1169
+ "step": 17400
1170
+ },
1171
+ {
1172
+ "epoch": 0.28,
1173
+ "eval_loss": 0.16456177830696106,
1174
+ "eval_runtime": 783.0024,
1175
+ "eval_samples_per_second": 2.189,
1176
+ "eval_steps_per_second": 0.275,
1177
+ "eval_wer": 26.444238492431264,
1178
+ "step": 17400
1179
+ },
1180
+ {
1181
+ "epoch": 0.28,
1182
+ "learning_rate": 7.272317112471802e-06,
1183
+ "loss": 0.054,
1184
+ "step": 17516
1185
+ },
1186
+ {
1187
+ "epoch": 0.28,
1188
+ "learning_rate": 7.2536255236867555e-06,
1189
+ "loss": 0.0515,
1190
+ "step": 17632
1191
+ },
1192
+ {
1193
+ "epoch": 0.28,
1194
+ "learning_rate": 7.2349339349017085e-06,
1195
+ "loss": 0.0564,
1196
+ "step": 17748
1197
+ },
1198
+ {
1199
+ "epoch": 0.29,
1200
+ "learning_rate": 7.2162423461166616e-06,
1201
+ "loss": 0.0523,
1202
+ "step": 17864
1203
+ },
1204
+ {
1205
+ "epoch": 0.29,
1206
+ "learning_rate": 7.197550757331615e-06,
1207
+ "loss": 0.0496,
1208
+ "step": 17980
1209
+ },
1210
+ {
1211
+ "epoch": 0.29,
1212
+ "eval_loss": 0.16263148188591003,
1213
+ "eval_runtime": 783.5956,
1214
+ "eval_samples_per_second": 2.187,
1215
+ "eval_steps_per_second": 0.274,
1216
+ "eval_wer": 23.96251673360107,
1217
+ "step": 17980
1218
+ },
1219
+ {
1220
+ "epoch": 0.29,
1221
+ "learning_rate": 7.1788591685465684e-06,
1222
+ "loss": 0.051,
1223
+ "step": 18096
1224
+ },
1225
+ {
1226
+ "epoch": 0.29,
1227
+ "learning_rate": 7.160167579761522e-06,
1228
+ "loss": 0.0546,
1229
+ "step": 18212
1230
+ },
1231
+ {
1232
+ "epoch": 0.29,
1233
+ "learning_rate": 7.141475990976475e-06,
1234
+ "loss": 0.0486,
1235
+ "step": 18328
1236
+ },
1237
+ {
1238
+ "epoch": 0.29,
1239
+ "learning_rate": 7.122784402191428e-06,
1240
+ "loss": 0.0502,
1241
+ "step": 18444
1242
+ },
1243
+ {
1244
+ "epoch": 0.3,
1245
+ "learning_rate": 7.104092813406381e-06,
1246
+ "loss": 0.0494,
1247
+ "step": 18560
1248
+ },
1249
+ {
1250
+ "epoch": 0.3,
1251
+ "eval_loss": 0.15693338215351105,
1252
+ "eval_runtime": 786.4458,
1253
+ "eval_samples_per_second": 2.179,
1254
+ "eval_steps_per_second": 0.273,
1255
+ "eval_wer": 18.69014519616929,
1256
+ "step": 18560
1257
+ },
1258
+ {
1259
+ "epoch": 0.3,
1260
+ "learning_rate": 7.085401224621334e-06,
1261
+ "loss": 0.0482,
1262
+ "step": 18676
1263
+ },
1264
+ {
1265
+ "epoch": 0.3,
1266
+ "learning_rate": 7.066709635836288e-06,
1267
+ "loss": 0.0487,
1268
+ "step": 18792
1269
+ },
1270
+ {
1271
+ "epoch": 0.3,
1272
+ "learning_rate": 7.048018047051241e-06,
1273
+ "loss": 0.0462,
1274
+ "step": 18908
1275
+ },
1276
+ {
1277
+ "epoch": 0.3,
1278
+ "learning_rate": 7.029326458266194e-06,
1279
+ "loss": 0.0488,
1280
+ "step": 19024
1281
+ },
1282
+ {
1283
+ "epoch": 0.31,
1284
+ "learning_rate": 7.010634869481147e-06,
1285
+ "loss": 0.0477,
1286
+ "step": 19140
1287
+ },
1288
+ {
1289
+ "epoch": 0.31,
1290
+ "eval_loss": 0.15665055811405182,
1291
+ "eval_runtime": 789.4798,
1292
+ "eval_samples_per_second": 2.171,
1293
+ "eval_steps_per_second": 0.272,
1294
+ "eval_wer": 26.176500875296053,
1295
+ "step": 19140
1296
+ },
1297
+ {
1298
+ "epoch": 0.31,
1299
+ "learning_rate": 6.9919432806961e-06,
1300
+ "loss": 0.0505,
1301
+ "step": 19256
1302
+ },
1303
+ {
1304
+ "epoch": 0.31,
1305
+ "learning_rate": 6.973251691911055e-06,
1306
+ "loss": 0.0488,
1307
+ "step": 19372
1308
+ },
1309
+ {
1310
+ "epoch": 0.31,
1311
+ "learning_rate": 6.954560103126008e-06,
1312
+ "loss": 0.0487,
1313
+ "step": 19488
1314
+ },
1315
+ {
1316
+ "epoch": 0.31,
1317
+ "learning_rate": 6.935868514340961e-06,
1318
+ "loss": 0.0454,
1319
+ "step": 19604
1320
+ },
1321
+ {
1322
+ "epoch": 0.31,
1323
+ "learning_rate": 6.917176925555914e-06,
1324
+ "loss": 0.0439,
1325
+ "step": 19720
1326
+ },
1327
+ {
1328
+ "epoch": 0.31,
1329
+ "eval_loss": 0.15991590917110443,
1330
+ "eval_runtime": 784.4279,
1331
+ "eval_samples_per_second": 2.185,
1332
+ "eval_steps_per_second": 0.274,
1333
+ "eval_wer": 26.7840593141798,
1334
+ "step": 19720
1335
+ },
1336
+ {
1337
+ "epoch": 0.32,
1338
+ "learning_rate": 6.898485336770867e-06,
1339
+ "loss": 0.0462,
1340
+ "step": 19836
1341
+ },
1342
+ {
1343
+ "epoch": 0.32,
1344
+ "learning_rate": 6.879954882371899e-06,
1345
+ "loss": 0.0524,
1346
+ "step": 19952
1347
+ },
1348
+ {
1349
+ "epoch": 0.32,
1350
+ "learning_rate": 6.861263293586852e-06,
1351
+ "loss": 0.0506,
1352
+ "step": 20068
1353
+ },
1354
+ {
1355
+ "epoch": 0.32,
1356
+ "learning_rate": 6.842571704801805e-06,
1357
+ "loss": 0.0461,
1358
+ "step": 20184
1359
+ },
1360
+ {
1361
+ "epoch": 0.32,
1362
+ "learning_rate": 6.823880116016759e-06,
1363
+ "loss": 0.0465,
1364
+ "step": 20300
1365
+ },
1366
+ {
1367
+ "epoch": 0.32,
1368
+ "eval_loss": 0.14981767535209656,
1369
+ "eval_runtime": 785.1354,
1370
+ "eval_samples_per_second": 2.183,
1371
+ "eval_steps_per_second": 0.274,
1372
+ "eval_wer": 25.692513644320876,
1373
+ "step": 20300
1374
+ },
1375
+ {
1376
+ "epoch": 0.33,
1377
+ "learning_rate": 6.805188527231712e-06,
1378
+ "loss": 0.0449,
1379
+ "step": 20416
1380
+ },
1381
+ {
1382
+ "epoch": 0.33,
1383
+ "learning_rate": 6.786496938446665e-06,
1384
+ "loss": 0.0451,
1385
+ "step": 20532
1386
+ },
1387
+ {
1388
+ "epoch": 0.33,
1389
+ "learning_rate": 6.767805349661618e-06,
1390
+ "loss": 0.0478,
1391
+ "step": 20648
1392
+ },
1393
+ {
1394
+ "epoch": 0.33,
1395
+ "learning_rate": 6.749113760876571e-06,
1396
+ "loss": 0.0431,
1397
+ "step": 20764
1398
+ },
1399
+ {
1400
+ "epoch": 0.33,
1401
+ "learning_rate": 6.730422172091526e-06,
1402
+ "loss": 0.0439,
1403
+ "step": 20880
1404
+ },
1405
+ {
1406
+ "epoch": 0.33,
1407
+ "eval_loss": 0.1557285338640213,
1408
+ "eval_runtime": 784.9439,
1409
+ "eval_samples_per_second": 2.184,
1410
+ "eval_steps_per_second": 0.274,
1411
+ "eval_wer": 26.403048089795078,
1412
+ "step": 20880
1413
+ },
1414
+ {
1415
+ "epoch": 0.34,
1416
+ "learning_rate": 6.711730583306479e-06,
1417
+ "loss": 0.0485,
1418
+ "step": 20996
1419
+ },
1420
+ {
1421
+ "epoch": 0.34,
1422
+ "learning_rate": 6.693038994521432e-06,
1423
+ "loss": 0.0435,
1424
+ "step": 21112
1425
+ },
1426
+ {
1427
+ "epoch": 0.34,
1428
+ "learning_rate": 6.674347405736385e-06,
1429
+ "loss": 0.0389,
1430
+ "step": 21228
1431
+ },
1432
+ {
1433
+ "epoch": 0.34,
1434
+ "learning_rate": 6.655655816951338e-06,
1435
+ "loss": 0.0444,
1436
+ "step": 21344
1437
+ },
1438
+ {
1439
+ "epoch": 0.34,
1440
+ "learning_rate": 6.636964228166292e-06,
1441
+ "loss": 0.0438,
1442
+ "step": 21460
1443
+ },
1444
+ {
1445
+ "epoch": 0.34,
1446
+ "eval_loss": 0.15771810710430145,
1447
+ "eval_runtime": 783.9278,
1448
+ "eval_samples_per_second": 2.186,
1449
+ "eval_steps_per_second": 0.274,
1450
+ "eval_wer": 26.51632169704459,
1451
+ "step": 21460
1452
+ },
1453
+ {
1454
+ "epoch": 0.34,
1455
+ "learning_rate": 6.618272639381245e-06,
1456
+ "loss": 0.0439,
1457
+ "step": 21576
1458
+ },
1459
+ {
1460
+ "epoch": 0.35,
1461
+ "learning_rate": 6.599581050596198e-06,
1462
+ "loss": 0.0458,
1463
+ "step": 21692
1464
+ },
1465
+ {
1466
+ "epoch": 0.35,
1467
+ "learning_rate": 6.580889461811151e-06,
1468
+ "loss": 0.0472,
1469
+ "step": 21808
1470
+ },
1471
+ {
1472
+ "epoch": 0.35,
1473
+ "learning_rate": 6.562197873026104e-06,
1474
+ "loss": 0.0433,
1475
+ "step": 21924
1476
+ },
1477
+ {
1478
+ "epoch": 0.35,
1479
+ "learning_rate": 6.5435062842410576e-06,
1480
+ "loss": 0.0443,
1481
+ "step": 22040
1482
+ },
1483
+ {
1484
+ "epoch": 0.35,
1485
+ "eval_loss": 0.14860232174396515,
1486
+ "eval_runtime": 783.2253,
1487
+ "eval_samples_per_second": 2.188,
1488
+ "eval_steps_per_second": 0.275,
1489
+ "eval_wer": 25.96025126145608,
1490
+ "step": 22040
1491
+ },
1492
+ {
1493
+ "epoch": 0.35,
1494
+ "learning_rate": 6.5249758298420885e-06,
1495
+ "loss": 0.0409,
1496
+ "step": 22156
1497
+ },
1498
+ {
1499
+ "epoch": 0.36,
1500
+ "learning_rate": 6.506284241057042e-06,
1501
+ "loss": 0.0402,
1502
+ "step": 22272
1503
+ },
1504
+ {
1505
+ "epoch": 0.36,
1506
+ "learning_rate": 6.487592652271995e-06,
1507
+ "loss": 0.0432,
1508
+ "step": 22388
1509
+ },
1510
+ {
1511
+ "epoch": 0.36,
1512
+ "learning_rate": 6.468901063486948e-06,
1513
+ "loss": 0.0407,
1514
+ "step": 22504
1515
+ },
1516
+ {
1517
+ "epoch": 0.36,
1518
+ "learning_rate": 6.450209474701901e-06,
1519
+ "loss": 0.0431,
1520
+ "step": 22620
1521
+ },
1522
+ {
1523
+ "epoch": 0.36,
1524
+ "eval_loss": 0.15237173438072205,
1525
+ "eval_runtime": 782.7057,
1526
+ "eval_samples_per_second": 2.19,
1527
+ "eval_steps_per_second": 0.275,
1528
+ "eval_wer": 25.898465657501802,
1529
+ "step": 22620
1530
+ },
1531
+ {
1532
+ "epoch": 0.36,
1533
+ "learning_rate": 6.4315178859168544e-06,
1534
+ "loss": 0.0444,
1535
+ "step": 22736
1536
+ },
1537
+ {
1538
+ "epoch": 0.36,
1539
+ "learning_rate": 6.412826297131809e-06,
1540
+ "loss": 0.0436,
1541
+ "step": 22852
1542
+ },
1543
+ {
1544
+ "epoch": 0.37,
1545
+ "learning_rate": 6.394134708346762e-06,
1546
+ "loss": 0.0448,
1547
+ "step": 22968
1548
+ },
1549
+ {
1550
+ "epoch": 0.37,
1551
+ "learning_rate": 6.375443119561715e-06,
1552
+ "loss": 0.04,
1553
+ "step": 23084
1554
+ },
1555
+ {
1556
+ "epoch": 0.37,
1557
+ "learning_rate": 6.356751530776668e-06,
1558
+ "loss": 0.0406,
1559
+ "step": 23200
1560
+ },
1561
+ {
1562
+ "epoch": 0.37,
1563
+ "eval_loss": 0.15666086971759796,
1564
+ "eval_runtime": 787.7061,
1565
+ "eval_samples_per_second": 2.176,
1566
+ "eval_steps_per_second": 0.273,
1567
+ "eval_wer": 26.33096488518175,
1568
+ "step": 23200
1569
+ },
1570
+ {
1571
+ "epoch": 0.37,
1572
+ "learning_rate": 6.338059941991621e-06,
1573
+ "loss": 0.0436,
1574
+ "step": 23316
1575
+ },
1576
+ {
1577
+ "epoch": 0.37,
1578
+ "learning_rate": 6.319368353206575e-06,
1579
+ "loss": 0.0395,
1580
+ "step": 23432
1581
+ },
1582
+ {
1583
+ "epoch": 0.38,
1584
+ "learning_rate": 6.300676764421528e-06,
1585
+ "loss": 0.0368,
1586
+ "step": 23548
1587
+ },
1588
+ {
1589
+ "epoch": 0.38,
1590
+ "learning_rate": 6.281985175636481e-06,
1591
+ "loss": 0.0438,
1592
+ "step": 23664
1593
+ },
1594
+ {
1595
+ "epoch": 0.38,
1596
+ "learning_rate": 6.263293586851434e-06,
1597
+ "loss": 0.0406,
1598
+ "step": 23780
1599
+ },
1600
+ {
1601
+ "epoch": 0.38,
1602
+ "eval_loss": 0.15179598331451416,
1603
+ "eval_runtime": 785.8287,
1604
+ "eval_samples_per_second": 2.181,
1605
+ "eval_steps_per_second": 0.274,
1606
+ "eval_wer": 24.786324786324787,
1607
+ "step": 23780
1608
+ },
1609
+ {
1610
+ "epoch": 0.38,
1611
+ "learning_rate": 6.244601998066387e-06,
1612
+ "loss": 0.0444,
1613
+ "step": 23896
1614
+ },
1615
+ {
1616
+ "epoch": 0.38,
1617
+ "learning_rate": 6.225910409281342e-06,
1618
+ "loss": 0.0415,
1619
+ "step": 24012
1620
+ },
1621
+ {
1622
+ "epoch": 0.39,
1623
+ "learning_rate": 6.207218820496295e-06,
1624
+ "loss": 0.0454,
1625
+ "step": 24128
1626
+ },
1627
+ {
1628
+ "epoch": 0.39,
1629
+ "learning_rate": 6.188527231711248e-06,
1630
+ "loss": 0.0414,
1631
+ "step": 24244
1632
+ },
1633
+ {
1634
+ "epoch": 0.39,
1635
+ "learning_rate": 6.169835642926201e-06,
1636
+ "loss": 0.0405,
1637
+ "step": 24360
1638
+ },
1639
+ {
1640
+ "epoch": 0.39,
1641
+ "eval_loss": 0.14717231690883636,
1642
+ "eval_runtime": 783.1318,
1643
+ "eval_samples_per_second": 2.189,
1644
+ "eval_steps_per_second": 0.275,
1645
+ "eval_wer": 19.77139326536917,
1646
+ "step": 24360
1647
+ },
1648
+ {
1649
+ "epoch": 0.39,
1650
+ "learning_rate": 6.151305188527233e-06,
1651
+ "loss": 0.0419,
1652
+ "step": 24476
1653
+ },
1654
+ {
1655
+ "epoch": 0.39,
1656
+ "learning_rate": 6.132613599742186e-06,
1657
+ "loss": 0.0397,
1658
+ "step": 24592
1659
+ },
1660
+ {
1661
+ "epoch": 0.39,
1662
+ "learning_rate": 6.113922010957139e-06,
1663
+ "loss": 0.0389,
1664
+ "step": 24708
1665
+ },
1666
+ {
1667
+ "epoch": 0.4,
1668
+ "learning_rate": 6.095230422172092e-06,
1669
+ "loss": 0.0374,
1670
+ "step": 24824
1671
+ },
1672
+ {
1673
+ "epoch": 0.4,
1674
+ "learning_rate": 6.076538833387046e-06,
1675
+ "loss": 0.0382,
1676
+ "step": 24940
1677
+ },
1678
+ {
1679
+ "epoch": 0.4,
1680
+ "eval_loss": 0.14440029859542847,
1681
+ "eval_runtime": 783.3674,
1682
+ "eval_samples_per_second": 2.188,
1683
+ "eval_steps_per_second": 0.274,
1684
+ "eval_wer": 20.656986922047164,
1685
+ "step": 24940
1686
+ },
1687
+ {
1688
+ "epoch": 0.4,
1689
+ "learning_rate": 6.057847244601999e-06,
1690
+ "loss": 0.0416,
1691
+ "step": 25056
1692
+ },
1693
+ {
1694
+ "epoch": 0.4,
1695
+ "learning_rate": 6.039155655816952e-06,
1696
+ "loss": 0.0422,
1697
+ "step": 25172
1698
+ },
1699
+ {
1700
+ "epoch": 0.4,
1701
+ "learning_rate": 6.020464067031905e-06,
1702
+ "loss": 0.0369,
1703
+ "step": 25288
1704
+ },
1705
+ {
1706
+ "epoch": 0.41,
1707
+ "learning_rate": 6.001772478246858e-06,
1708
+ "loss": 0.0414,
1709
+ "step": 25404
1710
+ },
1711
+ {
1712
+ "epoch": 0.41,
1713
+ "learning_rate": 5.983080889461812e-06,
1714
+ "loss": 0.0389,
1715
+ "step": 25520
1716
+ },
1717
+ {
1718
+ "epoch": 0.41,
1719
+ "eval_loss": 0.1415482610464096,
1720
+ "eval_runtime": 783.7026,
1721
+ "eval_samples_per_second": 2.187,
1722
+ "eval_steps_per_second": 0.274,
1723
+ "eval_wer": 21.944187004427967,
1724
+ "step": 25520
1725
+ },
1726
+ {
1727
+ "epoch": 0.41,
1728
+ "learning_rate": 5.964389300676765e-06,
1729
+ "loss": 0.0416,
1730
+ "step": 25636
1731
+ },
1732
+ {
1733
+ "epoch": 0.41,
1734
+ "learning_rate": 5.945697711891718e-06,
1735
+ "loss": 0.0386,
1736
+ "step": 25752
1737
+ },
1738
+ {
1739
+ "epoch": 0.41,
1740
+ "learning_rate": 5.927006123106671e-06,
1741
+ "loss": 0.0373,
1742
+ "step": 25868
1743
+ },
1744
+ {
1745
+ "epoch": 0.41,
1746
+ "learning_rate": 5.9083145343216254e-06,
1747
+ "loss": 0.0407,
1748
+ "step": 25984
1749
+ },
1750
+ {
1751
+ "epoch": 0.42,
1752
+ "learning_rate": 5.8896229455365785e-06,
1753
+ "loss": 0.0383,
1754
+ "step": 26100
1755
+ },
1756
+ {
1757
+ "epoch": 0.42,
1758
+ "eval_loss": 0.14374086260795593,
1759
+ "eval_runtime": 787.1,
1760
+ "eval_samples_per_second": 2.178,
1761
+ "eval_steps_per_second": 0.273,
1762
+ "eval_wer": 21.408711770157556,
1763
+ "step": 26100
1764
+ },
1765
+ {
1766
+ "epoch": 0.42,
1767
+ "learning_rate": 5.8709313567515315e-06,
1768
+ "loss": 0.0405,
1769
+ "step": 26216
1770
+ },
1771
+ {
1772
+ "epoch": 0.42,
1773
+ "learning_rate": 5.8522397679664845e-06,
1774
+ "loss": 0.0414,
1775
+ "step": 26332
1776
+ },
1777
+ {
1778
+ "epoch": 0.42,
1779
+ "learning_rate": 5.8335481791814375e-06,
1780
+ "loss": 0.0398,
1781
+ "step": 26448
1782
+ },
1783
+ {
1784
+ "epoch": 0.42,
1785
+ "learning_rate": 5.815017724782469e-06,
1786
+ "loss": 0.0421,
1787
+ "step": 26564
1788
+ },
1789
+ {
1790
+ "epoch": 0.43,
1791
+ "learning_rate": 5.796326135997422e-06,
1792
+ "loss": 0.036,
1793
+ "step": 26680
1794
+ },
1795
+ {
1796
+ "epoch": 0.43,
1797
+ "eval_loss": 0.14346691966056824,
1798
+ "eval_runtime": 787.9907,
1799
+ "eval_samples_per_second": 2.175,
1800
+ "eval_steps_per_second": 0.273,
1801
+ "eval_wer": 21.50139017608897,
1802
+ "step": 26680
1803
+ },
1804
+ {
1805
+ "epoch": 0.43,
1806
+ "learning_rate": 5.777634547212375e-06,
1807
+ "loss": 0.0383,
1808
+ "step": 26796
1809
+ },
1810
+ {
1811
+ "epoch": 0.43,
1812
+ "learning_rate": 5.758942958427329e-06,
1813
+ "loss": 0.0399,
1814
+ "step": 26912
1815
+ },
1816
+ {
1817
+ "epoch": 0.43,
1818
+ "learning_rate": 5.740251369642282e-06,
1819
+ "loss": 0.0368,
1820
+ "step": 27028
1821
+ },
1822
+ {
1823
+ "epoch": 0.43,
1824
+ "learning_rate": 5.721559780857235e-06,
1825
+ "loss": 0.0386,
1826
+ "step": 27144
1827
+ },
1828
+ {
1829
+ "epoch": 0.44,
1830
+ "learning_rate": 5.702868192072188e-06,
1831
+ "loss": 0.0357,
1832
+ "step": 27260
1833
+ },
1834
+ {
1835
+ "epoch": 0.44,
1836
+ "eval_loss": 0.1420992761850357,
1837
+ "eval_runtime": 784.1914,
1838
+ "eval_samples_per_second": 2.186,
1839
+ "eval_steps_per_second": 0.274,
1840
+ "eval_wer": 23.663886314488725,
1841
+ "step": 27260
1842
+ },
1843
+ {
1844
+ "epoch": 0.44,
1845
+ "learning_rate": 5.684176603287141e-06,
1846
+ "loss": 0.0395,
1847
+ "step": 27376
1848
+ },
1849
+ {
1850
+ "epoch": 0.44,
1851
+ "learning_rate": 5.665485014502096e-06,
1852
+ "loss": 0.0348,
1853
+ "step": 27492
1854
+ },
1855
+ {
1856
+ "epoch": 0.44,
1857
+ "learning_rate": 5.646793425717049e-06,
1858
+ "loss": 0.0361,
1859
+ "step": 27608
1860
+ },
1861
+ {
1862
+ "epoch": 0.44,
1863
+ "learning_rate": 5.628101836932002e-06,
1864
+ "loss": 0.0369,
1865
+ "step": 27724
1866
+ },
1867
+ {
1868
+ "epoch": 0.44,
1869
+ "learning_rate": 5.609410248146955e-06,
1870
+ "loss": 0.0369,
1871
+ "step": 27840
1872
+ },
1873
+ {
1874
+ "epoch": 0.44,
1875
+ "eval_loss": 0.14183476567268372,
1876
+ "eval_runtime": 784.9371,
1877
+ "eval_samples_per_second": 2.184,
1878
+ "eval_steps_per_second": 0.274,
1879
+ "eval_wer": 23.911028730305837,
1880
+ "step": 27840
1881
+ },
1882
+ {
1883
+ "epoch": 0.45,
1884
+ "learning_rate": 5.590718659361908e-06,
1885
+ "loss": 0.0372,
1886
+ "step": 27956
1887
+ },
1888
+ {
1889
+ "epoch": 0.45,
1890
+ "learning_rate": 5.572027070576862e-06,
1891
+ "loss": 0.0355,
1892
+ "step": 28072
1893
+ },
1894
+ {
1895
+ "epoch": 0.45,
1896
+ "learning_rate": 5.553335481791815e-06,
1897
+ "loss": 0.0354,
1898
+ "step": 28188
1899
+ },
1900
+ {
1901
+ "epoch": 0.45,
1902
+ "learning_rate": 5.534643893006768e-06,
1903
+ "loss": 0.0342,
1904
+ "step": 28304
1905
+ },
1906
+ {
1907
+ "epoch": 0.45,
1908
+ "learning_rate": 5.515952304221721e-06,
1909
+ "loss": 0.035,
1910
+ "step": 28420
1911
+ },
1912
+ {
1913
+ "epoch": 0.45,
1914
+ "eval_loss": 0.13896532356739044,
1915
+ "eval_runtime": 785.0487,
1916
+ "eval_samples_per_second": 2.183,
1917
+ "eval_steps_per_second": 0.274,
1918
+ "eval_wer": 24.88930079291525,
1919
+ "step": 28420
1920
+ },
1921
+ {
1922
+ "epoch": 0.46,
1923
+ "learning_rate": 5.497260715436674e-06,
1924
+ "loss": 0.0338,
1925
+ "step": 28536
1926
+ },
1927
+ {
1928
+ "epoch": 0.46,
1929
+ "learning_rate": 5.478569126651628e-06,
1930
+ "loss": 0.0364,
1931
+ "step": 28652
1932
+ },
1933
+ {
1934
+ "epoch": 0.46,
1935
+ "learning_rate": 5.459877537866582e-06,
1936
+ "loss": 0.0368,
1937
+ "step": 28768
1938
+ },
1939
+ {
1940
+ "epoch": 0.46,
1941
+ "learning_rate": 5.441185949081535e-06,
1942
+ "loss": 0.0328,
1943
+ "step": 28884
1944
+ },
1945
+ {
1946
+ "epoch": 0.46,
1947
+ "learning_rate": 5.422494360296488e-06,
1948
+ "loss": 0.0368,
1949
+ "step": 29000
1950
+ },
1951
+ {
1952
+ "epoch": 0.46,
1953
+ "eval_loss": 0.14057199656963348,
1954
+ "eval_runtime": 793.8154,
1955
+ "eval_samples_per_second": 2.159,
1956
+ "eval_steps_per_second": 0.271,
1957
+ "eval_wer": 23.334363093399237,
1958
+ "step": 29000
1959
+ },
1960
+ {
1961
+ "epoch": 0.46,
1962
+ "learning_rate": 5.403802771511441e-06,
1963
+ "loss": 0.0366,
1964
+ "step": 29116
1965
+ },
1966
+ {
1967
+ "epoch": 0.47,
1968
+ "learning_rate": 5.385111182726395e-06,
1969
+ "loss": 0.0336,
1970
+ "step": 29232
1971
+ },
1972
+ {
1973
+ "epoch": 0.47,
1974
+ "learning_rate": 5.366419593941348e-06,
1975
+ "loss": 0.0343,
1976
+ "step": 29348
1977
+ },
1978
+ {
1979
+ "epoch": 0.47,
1980
+ "learning_rate": 5.347728005156301e-06,
1981
+ "loss": 0.0334,
1982
+ "step": 29464
1983
+ },
1984
+ {
1985
+ "epoch": 0.47,
1986
+ "learning_rate": 5.329036416371254e-06,
1987
+ "loss": 0.0323,
1988
+ "step": 29580
1989
+ },
1990
+ {
1991
+ "epoch": 0.47,
1992
+ "eval_loss": 0.1342051774263382,
1993
+ "eval_runtime": 783.5326,
1994
+ "eval_samples_per_second": 2.188,
1995
+ "eval_steps_per_second": 0.274,
1996
+ "eval_wer": 21.975079806405105,
1997
+ "step": 29580
1998
+ },
1999
+ {
2000
+ "epoch": 0.47,
2001
+ "learning_rate": 5.310344827586207e-06,
2002
+ "loss": 0.0349,
2003
+ "step": 29696
2004
+ },
2005
+ {
2006
+ "epoch": 0.48,
2007
+ "learning_rate": 5.291653238801161e-06,
2008
+ "loss": 0.0311,
2009
+ "step": 29812
2010
+ },
2011
+ {
2012
+ "epoch": 0.48,
2013
+ "learning_rate": 5.272961650016114e-06,
2014
+ "loss": 0.0322,
2015
+ "step": 29928
2016
+ },
2017
+ {
2018
+ "epoch": 0.48,
2019
+ "learning_rate": 5.254270061231067e-06,
2020
+ "loss": 0.0341,
2021
+ "step": 30044
2022
+ },
2023
+ {
2024
+ "epoch": 0.48,
2025
+ "learning_rate": 5.2368675475346446e-06,
2026
+ "loss": 0.1425,
2027
+ "step": 30160
2028
+ },
2029
+ {
2030
+ "epoch": 0.48,
2031
+ "eval_loss": 0.1378883421421051,
2032
+ "eval_runtime": 787.4376,
2033
+ "eval_samples_per_second": 2.177,
2034
+ "eval_steps_per_second": 0.273,
2035
+ "eval_wer": 15.343424981979197,
2036
+ "step": 30160
2037
+ },
2038
+ {
2039
+ "epoch": 0.48,
2040
+ "learning_rate": 5.218175958749598e-06,
2041
+ "loss": 0.0322,
2042
+ "step": 30276
2043
+ },
2044
+ {
2045
+ "epoch": 0.49,
2046
+ "learning_rate": 5.1994843699645515e-06,
2047
+ "loss": 0.0351,
2048
+ "step": 30392
2049
+ },
2050
+ {
2051
+ "epoch": 0.49,
2052
+ "learning_rate": 5.1807927811795045e-06,
2053
+ "loss": 0.0362,
2054
+ "step": 30508
2055
+ },
2056
+ {
2057
+ "epoch": 0.49,
2058
+ "learning_rate": 5.1621011923944575e-06,
2059
+ "loss": 0.0333,
2060
+ "step": 30624
2061
+ },
2062
+ {
2063
+ "epoch": 0.49,
2064
+ "learning_rate": 5.1434096036094105e-06,
2065
+ "loss": 0.0324,
2066
+ "step": 30740
2067
+ },
2068
+ {
2069
+ "epoch": 0.49,
2070
+ "eval_loss": 0.1346246302127838,
2071
+ "eval_runtime": 783.3435,
2072
+ "eval_samples_per_second": 2.188,
2073
+ "eval_steps_per_second": 0.274,
2074
+ "eval_wer": 10.513850272886417,
2075
+ "step": 30740
2076
+ },
2077
+ {
2078
+ "epoch": 0.49,
2079
+ "learning_rate": 5.1247180148243635e-06,
2080
+ "loss": 0.035,
2081
+ "step": 30856
2082
+ },
2083
+ {
2084
+ "epoch": 0.49,
2085
+ "learning_rate": 5.106026426039317e-06,
2086
+ "loss": 0.0353,
2087
+ "step": 30972
2088
+ },
2089
+ {
2090
+ "epoch": 0.5,
2091
+ "learning_rate": 5.0873348372542704e-06,
2092
+ "loss": 0.0315,
2093
+ "step": 31088
2094
+ },
2095
+ {
2096
+ "epoch": 0.5,
2097
+ "learning_rate": 5.0686432484692235e-06,
2098
+ "loss": 0.0355,
2099
+ "step": 31204
2100
+ },
2101
+ {
2102
+ "epoch": 0.5,
2103
+ "learning_rate": 5.0499516596841765e-06,
2104
+ "loss": 0.0303,
2105
+ "step": 31320
2106
+ },
2107
+ {
2108
+ "epoch": 0.5,
2109
+ "eval_loss": 0.13406488299369812,
2110
+ "eval_runtime": 785.1703,
2111
+ "eval_samples_per_second": 2.183,
2112
+ "eval_steps_per_second": 0.274,
2113
+ "eval_wer": 10.24611265575121,
2114
+ "step": 31320
2115
+ },
2116
+ {
2117
+ "epoch": 0.5,
2118
+ "learning_rate": 5.03126007089913e-06,
2119
+ "loss": 0.0326,
2120
+ "step": 31436
2121
+ },
2122
+ {
2123
+ "epoch": 0.5,
2124
+ "learning_rate": 5.012568482114084e-06,
2125
+ "loss": 0.0303,
2126
+ "step": 31552
2127
+ },
2128
+ {
2129
+ "epoch": 0.51,
2130
+ "learning_rate": 4.993876893329036e-06,
2131
+ "loss": 0.0342,
2132
+ "step": 31668
2133
+ },
2134
+ {
2135
+ "epoch": 0.51,
2136
+ "learning_rate": 4.97518530454399e-06,
2137
+ "loss": 0.0297,
2138
+ "step": 31784
2139
+ },
2140
+ {
2141
+ "epoch": 0.51,
2142
+ "learning_rate": 4.956493715758943e-06,
2143
+ "loss": 0.0298,
2144
+ "step": 31900
2145
+ },
2146
+ {
2147
+ "epoch": 0.51,
2148
+ "eval_loss": 0.13156923651695251,
2149
+ "eval_runtime": 788.5054,
2150
+ "eval_samples_per_second": 2.174,
2151
+ "eval_steps_per_second": 0.273,
2152
+ "eval_wer": 9.535578210277006,
2153
+ "step": 31900
2154
+ },
2155
+ {
2156
+ "epoch": 0.51,
2157
+ "learning_rate": 4.937802126973897e-06,
2158
+ "loss": 0.0333,
2159
+ "step": 32016
2160
+ },
2161
+ {
2162
+ "epoch": 0.51,
2163
+ "learning_rate": 4.91911053818885e-06,
2164
+ "loss": 0.0314,
2165
+ "step": 32132
2166
+ },
2167
+ {
2168
+ "epoch": 0.51,
2169
+ "learning_rate": 4.900418949403803e-06,
2170
+ "loss": 0.0303,
2171
+ "step": 32248
2172
+ },
2173
+ {
2174
+ "epoch": 0.52,
2175
+ "learning_rate": 4.881727360618756e-06,
2176
+ "loss": 0.0332,
2177
+ "step": 32364
2178
+ },
2179
+ {
2180
+ "epoch": 0.52,
2181
+ "learning_rate": 4.863035771833709e-06,
2182
+ "loss": 0.0308,
2183
+ "step": 32480
2184
+ },
2185
+ {
2186
+ "epoch": 0.52,
2187
+ "eval_loss": 0.13265329599380493,
2188
+ "eval_runtime": 782.9581,
2189
+ "eval_samples_per_second": 2.189,
2190
+ "eval_steps_per_second": 0.275,
2191
+ "eval_wer": 9.514983008958913,
2192
+ "step": 32480
2193
+ },
2194
+ {
2195
+ "epoch": 0.52,
2196
+ "learning_rate": 4.844344183048663e-06,
2197
+ "loss": 0.033,
2198
+ "step": 32596
2199
+ },
2200
+ {
2201
+ "epoch": 0.52,
2202
+ "learning_rate": 4.825652594263616e-06,
2203
+ "loss": 0.0328,
2204
+ "step": 32712
2205
+ },
2206
+ {
2207
+ "epoch": 0.52,
2208
+ "learning_rate": 4.806961005478569e-06,
2209
+ "loss": 0.0303,
2210
+ "step": 32828
2211
+ },
2212
+ {
2213
+ "epoch": 0.53,
2214
+ "learning_rate": 4.788269416693523e-06,
2215
+ "loss": 0.0297,
2216
+ "step": 32944
2217
+ },
2218
+ {
2219
+ "epoch": 0.53,
2220
+ "learning_rate": 4.769577827908476e-06,
2221
+ "loss": 0.0312,
2222
+ "step": 33060
2223
+ },
2224
+ {
2225
+ "epoch": 0.53,
2226
+ "eval_loss": 0.1312318742275238,
2227
+ "eval_runtime": 783.4807,
2228
+ "eval_samples_per_second": 2.188,
2229
+ "eval_steps_per_second": 0.274,
2230
+ "eval_wer": 10.019565441252189,
2231
+ "step": 33060
2232
+ },
2233
+ {
2234
+ "epoch": 0.53,
2235
+ "learning_rate": 4.75088623912343e-06,
2236
+ "loss": 0.0305,
2237
+ "step": 33176
2238
+ },
2239
+ {
2240
+ "epoch": 0.53,
2241
+ "learning_rate": 4.732194650338383e-06,
2242
+ "loss": 0.0306,
2243
+ "step": 33292
2244
+ },
2245
+ {
2246
+ "epoch": 0.53,
2247
+ "learning_rate": 4.713503061553336e-06,
2248
+ "loss": 0.0297,
2249
+ "step": 33408
2250
+ },
2251
+ {
2252
+ "epoch": 0.54,
2253
+ "learning_rate": 4.694811472768289e-06,
2254
+ "loss": 0.0287,
2255
+ "step": 33524
2256
+ },
2257
+ {
2258
+ "epoch": 0.54,
2259
+ "learning_rate": 4.676119883983242e-06,
2260
+ "loss": 0.0316,
2261
+ "step": 33640
2262
+ },
2263
+ {
2264
+ "epoch": 0.54,
2265
+ "eval_loss": 0.12888002395629883,
2266
+ "eval_runtime": 787.0374,
2267
+ "eval_samples_per_second": 2.178,
2268
+ "eval_steps_per_second": 0.273,
2269
+ "eval_wer": 9.298733395118937,
2270
+ "step": 33640
2271
+ },
2272
+ {
2273
+ "epoch": 0.54,
2274
+ "learning_rate": 4.657428295198196e-06,
2275
+ "loss": 0.0299,
2276
+ "step": 33756
2277
+ },
2278
+ {
2279
+ "epoch": 0.54,
2280
+ "learning_rate": 4.638736706413149e-06,
2281
+ "loss": 0.0305,
2282
+ "step": 33872
2283
+ },
2284
+ {
2285
+ "epoch": 0.54,
2286
+ "learning_rate": 4.620045117628103e-06,
2287
+ "loss": 0.0312,
2288
+ "step": 33988
2289
+ },
2290
+ {
2291
+ "epoch": 0.54,
2292
+ "learning_rate": 4.601353528843056e-06,
2293
+ "loss": 0.0294,
2294
+ "step": 34104
2295
+ },
2296
+ {
2297
+ "epoch": 0.55,
2298
+ "learning_rate": 4.582661940058009e-06,
2299
+ "loss": 0.0318,
2300
+ "step": 34220
2301
+ },
2302
+ {
2303
+ "epoch": 0.55,
2304
+ "eval_loss": 0.12854613363742828,
2305
+ "eval_runtime": 782.7642,
2306
+ "eval_samples_per_second": 2.19,
2307
+ "eval_steps_per_second": 0.275,
2308
+ "eval_wer": 9.309030995777984,
2309
+ "step": 34220
2310
+ },
2311
+ {
2312
+ "epoch": 0.55,
2313
+ "learning_rate": 4.563970351272962e-06,
2314
+ "loss": 0.0315,
2315
+ "step": 34336
2316
+ },
2317
+ {
2318
+ "epoch": 0.55,
2319
+ "learning_rate": 4.545278762487915e-06,
2320
+ "loss": 0.0299,
2321
+ "step": 34452
2322
+ },
2323
+ {
2324
+ "epoch": 0.55,
2325
+ "learning_rate": 4.526587173702869e-06,
2326
+ "loss": 0.029,
2327
+ "step": 34568
2328
+ },
2329
+ {
2330
+ "epoch": 0.55,
2331
+ "learning_rate": 4.507895584917822e-06,
2332
+ "loss": 0.0294,
2333
+ "step": 34684
2334
+ },
2335
+ {
2336
+ "epoch": 0.56,
2337
+ "learning_rate": 4.489203996132775e-06,
2338
+ "loss": 0.0305,
2339
+ "step": 34800
2340
+ },
2341
+ {
2342
+ "epoch": 0.56,
2343
+ "eval_loss": 0.12825001776218414,
2344
+ "eval_runtime": 786.5669,
2345
+ "eval_samples_per_second": 2.179,
2346
+ "eval_steps_per_second": 0.273,
2347
+ "eval_wer": 9.422304603027493,
2348
+ "step": 34800
2349
+ },
2350
+ {
2351
+ "epoch": 0.56,
2352
+ "learning_rate": 4.470512407347729e-06,
2353
+ "loss": 0.0278,
2354
+ "step": 34916
2355
+ },
2356
+ {
2357
+ "epoch": 0.56,
2358
+ "learning_rate": 4.451820818562682e-06,
2359
+ "loss": 0.0287,
2360
+ "step": 35032
2361
+ },
2362
+ {
2363
+ "epoch": 0.56,
2364
+ "learning_rate": 4.4331292297776355e-06,
2365
+ "loss": 0.0268,
2366
+ "step": 35148
2367
+ },
2368
+ {
2369
+ "epoch": 0.56,
2370
+ "learning_rate": 4.4144376409925886e-06,
2371
+ "loss": 0.0275,
2372
+ "step": 35264
2373
+ },
2374
+ {
2375
+ "epoch": 0.56,
2376
+ "learning_rate": 4.395746052207542e-06,
2377
+ "loss": 0.0279,
2378
+ "step": 35380
2379
+ },
2380
+ {
2381
+ "epoch": 0.56,
2382
+ "eval_loss": 0.12600964307785034,
2383
+ "eval_runtime": 794.742,
2384
+ "eval_samples_per_second": 2.157,
2385
+ "eval_steps_per_second": 0.271,
2386
+ "eval_wer": 9.14426938523324,
2387
+ "step": 35380
2388
+ },
2389
+ {
2390
+ "epoch": 0.57,
2391
+ "learning_rate": 4.377054463422495e-06,
2392
+ "loss": 0.0286,
2393
+ "step": 35496
2394
+ },
2395
+ {
2396
+ "epoch": 0.57,
2397
+ "learning_rate": 4.358362874637448e-06,
2398
+ "loss": 0.0276,
2399
+ "step": 35612
2400
+ },
2401
+ {
2402
+ "epoch": 0.57,
2403
+ "learning_rate": 4.3396712858524015e-06,
2404
+ "loss": 0.0314,
2405
+ "step": 35728
2406
+ },
2407
+ {
2408
+ "epoch": 0.57,
2409
+ "learning_rate": 4.3209796970673545e-06,
2410
+ "loss": 0.0336,
2411
+ "step": 35844
2412
+ },
2413
+ {
2414
+ "epoch": 0.57,
2415
+ "learning_rate": 4.3022881082823075e-06,
2416
+ "loss": 0.0253,
2417
+ "step": 35960
2418
+ },
2419
+ {
2420
+ "epoch": 0.57,
2421
+ "eval_loss": 0.12685632705688477,
2422
+ "eval_runtime": 785.8668,
2423
+ "eval_samples_per_second": 2.181,
2424
+ "eval_steps_per_second": 0.274,
2425
+ "eval_wer": 9.49438780764082,
2426
+ "step": 35960
2427
+ },
2428
+ {
2429
+ "epoch": 0.58,
2430
+ "learning_rate": 4.283596519497261e-06,
2431
+ "loss": 0.0276,
2432
+ "step": 36076
2433
+ },
2434
+ {
2435
+ "epoch": 0.58,
2436
+ "learning_rate": 4.2649049307122144e-06,
2437
+ "loss": 0.0283,
2438
+ "step": 36192
2439
+ },
2440
+ {
2441
+ "epoch": 0.58,
2442
+ "learning_rate": 4.2462133419271675e-06,
2443
+ "loss": 0.0313,
2444
+ "step": 36308
2445
+ },
2446
+ {
2447
+ "epoch": 0.58,
2448
+ "learning_rate": 4.2275217531421205e-06,
2449
+ "loss": 0.0281,
2450
+ "step": 36424
2451
+ },
2452
+ {
2453
+ "epoch": 0.58,
2454
+ "learning_rate": 4.2088301643570735e-06,
2455
+ "loss": 0.026,
2456
+ "step": 36540
2457
+ },
2458
+ {
2459
+ "epoch": 0.58,
2460
+ "eval_loss": 0.1278238743543625,
2461
+ "eval_runtime": 782.8112,
2462
+ "eval_samples_per_second": 2.19,
2463
+ "eval_steps_per_second": 0.275,
2464
+ "eval_wer": 9.597363814231285,
2465
+ "step": 36540
2466
+ },
2467
+ {
2468
+ "epoch": 0.59,
2469
+ "learning_rate": 4.190138575572027e-06,
2470
+ "loss": 0.0267,
2471
+ "step": 36656
2472
+ },
2473
+ {
2474
+ "epoch": 0.59,
2475
+ "learning_rate": 4.17144698678698e-06,
2476
+ "loss": 0.0237,
2477
+ "step": 36772
2478
+ },
2479
+ {
2480
+ "epoch": 0.59,
2481
+ "learning_rate": 4.152755398001934e-06,
2482
+ "loss": 0.0281,
2483
+ "step": 36888
2484
+ },
2485
+ {
2486
+ "epoch": 0.59,
2487
+ "learning_rate": 4.134063809216887e-06,
2488
+ "loss": 0.0287,
2489
+ "step": 37004
2490
+ },
2491
+ {
2492
+ "epoch": 0.59,
2493
+ "learning_rate": 4.11537222043184e-06,
2494
+ "loss": 0.0309,
2495
+ "step": 37120
2496
+ },
2497
+ {
2498
+ "epoch": 0.59,
2499
+ "eval_loss": 0.12141475081443787,
2500
+ "eval_runtime": 786.3376,
2501
+ "eval_samples_per_second": 2.18,
2502
+ "eval_steps_per_second": 0.273,
2503
+ "eval_wer": 9.257542992482753,
2504
+ "step": 37120
2505
+ },
2506
+ {
2507
+ "epoch": 0.59,
2508
+ "learning_rate": 4.096680631646794e-06,
2509
+ "loss": 0.0282,
2510
+ "step": 37236
2511
+ },
2512
+ {
2513
+ "epoch": 0.6,
2514
+ "learning_rate": 4.077989042861747e-06,
2515
+ "loss": 0.0274,
2516
+ "step": 37352
2517
+ },
2518
+ {
2519
+ "epoch": 0.6,
2520
+ "learning_rate": 4.0592974540767e-06,
2521
+ "loss": 0.0267,
2522
+ "step": 37468
2523
+ },
2524
+ {
2525
+ "epoch": 0.6,
2526
+ "learning_rate": 4.040605865291653e-06,
2527
+ "loss": 0.0277,
2528
+ "step": 37584
2529
+ },
2530
+ {
2531
+ "epoch": 0.6,
2532
+ "learning_rate": 4.021914276506606e-06,
2533
+ "loss": 0.0264,
2534
+ "step": 37700
2535
+ },
2536
+ {
2537
+ "epoch": 0.6,
2538
+ "eval_loss": 0.12097407132387161,
2539
+ "eval_runtime": 784.7681,
2540
+ "eval_samples_per_second": 2.184,
2541
+ "eval_steps_per_second": 0.274,
2542
+ "eval_wer": 9.113376583256102,
2543
+ "step": 37700
2544
+ },
2545
+ {
2546
+ "epoch": 0.6,
2547
+ "learning_rate": 4.00322268772156e-06,
2548
+ "loss": 0.0274,
2549
+ "step": 37816
2550
+ },
2551
+ {
2552
+ "epoch": 0.61,
2553
+ "learning_rate": 3.984531098936513e-06,
2554
+ "loss": 0.0274,
2555
+ "step": 37932
2556
+ },
2557
+ {
2558
+ "epoch": 0.61,
2559
+ "learning_rate": 3.965839510151467e-06,
2560
+ "loss": 0.0272,
2561
+ "step": 38048
2562
+ },
2563
+ {
2564
+ "epoch": 0.61,
2565
+ "learning_rate": 3.94714792136642e-06,
2566
+ "loss": 0.0247,
2567
+ "step": 38164
2568
+ },
2569
+ {
2570
+ "epoch": 0.61,
2571
+ "learning_rate": 3.928456332581373e-06,
2572
+ "loss": 0.0267,
2573
+ "step": 38280
2574
+ },
2575
+ {
2576
+ "epoch": 0.61,
2577
+ "eval_loss": 0.11954796314239502,
2578
+ "eval_runtime": 787.0916,
2579
+ "eval_samples_per_second": 2.178,
2580
+ "eval_steps_per_second": 0.273,
2581
+ "eval_wer": 8.691174956235196,
2582
+ "step": 38280
2583
+ },
2584
+ {
2585
+ "epoch": 0.61,
2586
+ "learning_rate": 3.909764743796327e-06,
2587
+ "loss": 0.0258,
2588
+ "step": 38396
2589
+ },
2590
+ {
2591
+ "epoch": 0.61,
2592
+ "learning_rate": 3.89107315501128e-06,
2593
+ "loss": 0.027,
2594
+ "step": 38512
2595
+ },
2596
+ {
2597
+ "epoch": 0.62,
2598
+ "learning_rate": 3.872381566226233e-06,
2599
+ "loss": 0.0218,
2600
+ "step": 38628
2601
+ },
2602
+ {
2603
+ "epoch": 0.62,
2604
+ "learning_rate": 3.853689977441186e-06,
2605
+ "loss": 0.0271,
2606
+ "step": 38744
2607
+ },
2608
+ {
2609
+ "epoch": 0.62,
2610
+ "learning_rate": 3.834998388656139e-06,
2611
+ "loss": 0.0265,
2612
+ "step": 38860
2613
+ },
2614
+ {
2615
+ "epoch": 0.62,
2616
+ "eval_loss": 0.122675821185112,
2617
+ "eval_runtime": 782.1019,
2618
+ "eval_samples_per_second": 2.192,
2619
+ "eval_steps_per_second": 0.275,
2620
+ "eval_wer": 8.938317372052312,
2621
+ "step": 38860
2622
+ },
2623
+ {
2624
+ "epoch": 0.62,
2625
+ "learning_rate": 3.816306799871093e-06,
2626
+ "loss": 0.0262,
2627
+ "step": 38976
2628
+ },
2629
+ {
2630
+ "epoch": 0.62,
2631
+ "learning_rate": 3.797615211086046e-06,
2632
+ "loss": 0.0247,
2633
+ "step": 39092
2634
+ },
2635
+ {
2636
+ "epoch": 0.63,
2637
+ "learning_rate": 3.7789236223009994e-06,
2638
+ "loss": 0.0244,
2639
+ "step": 39208
2640
+ },
2641
+ {
2642
+ "epoch": 0.63,
2643
+ "learning_rate": 3.7602320335159524e-06,
2644
+ "loss": 0.0238,
2645
+ "step": 39324
2646
+ },
2647
+ {
2648
+ "epoch": 0.63,
2649
+ "learning_rate": 3.7415404447309054e-06,
2650
+ "loss": 0.0249,
2651
+ "step": 39440
2652
+ },
2653
+ {
2654
+ "epoch": 0.63,
2655
+ "eval_loss": 0.12250470370054245,
2656
+ "eval_runtime": 786.0666,
2657
+ "eval_samples_per_second": 2.18,
2658
+ "eval_steps_per_second": 0.274,
2659
+ "eval_wer": 9.020698177324684,
2660
+ "step": 39440
2661
+ },
2662
+ {
2663
+ "epoch": 0.63,
2664
+ "learning_rate": 3.7228488559458593e-06,
2665
+ "loss": 0.0212,
2666
+ "step": 39556
2667
+ },
2668
+ {
2669
+ "epoch": 0.63,
2670
+ "learning_rate": 3.7041572671608123e-06,
2671
+ "loss": 0.0245,
2672
+ "step": 39672
2673
+ },
2674
+ {
2675
+ "epoch": 0.64,
2676
+ "learning_rate": 3.6854656783757658e-06,
2677
+ "loss": 0.0248,
2678
+ "step": 39788
2679
+ },
2680
+ {
2681
+ "epoch": 0.64,
2682
+ "learning_rate": 3.6667740895907188e-06,
2683
+ "loss": 0.0246,
2684
+ "step": 39904
2685
+ },
2686
+ {
2687
+ "epoch": 0.64,
2688
+ "learning_rate": 3.6480825008056726e-06,
2689
+ "loss": 0.0243,
2690
+ "step": 40020
2691
+ },
2692
+ {
2693
+ "epoch": 0.64,
2694
+ "eval_loss": 0.11990202963352203,
2695
+ "eval_runtime": 783.4725,
2696
+ "eval_samples_per_second": 2.188,
2697
+ "eval_steps_per_second": 0.274,
2698
+ "eval_wer": 8.608794150962826,
2699
+ "step": 40020
2700
+ },
2701
+ {
2702
+ "epoch": 0.64,
2703
+ "learning_rate": 3.6293909120206257e-06,
2704
+ "loss": 0.0254,
2705
+ "step": 40136
2706
+ },
2707
+ {
2708
+ "epoch": 0.64,
2709
+ "learning_rate": 3.6106993232355787e-06,
2710
+ "loss": 0.0232,
2711
+ "step": 40252
2712
+ },
2713
+ {
2714
+ "epoch": 0.64,
2715
+ "learning_rate": 3.592007734450532e-06,
2716
+ "loss": 0.0228,
2717
+ "step": 40368
2718
+ },
2719
+ {
2720
+ "epoch": 0.65,
2721
+ "learning_rate": 3.573316145665485e-06,
2722
+ "loss": 0.0235,
2723
+ "step": 40484
2724
+ },
2725
+ {
2726
+ "epoch": 0.65,
2727
+ "learning_rate": 3.554624556880439e-06,
2728
+ "loss": 0.028,
2729
+ "step": 40600
2730
+ },
2731
+ {
2732
+ "epoch": 0.65,
2733
+ "eval_loss": 0.11790936440229416,
2734
+ "eval_runtime": 795.6896,
2735
+ "eval_samples_per_second": 2.154,
2736
+ "eval_steps_per_second": 0.27,
2737
+ "eval_wer": 8.722067758212336,
2738
+ "step": 40600
2739
+ },
2740
+ {
2741
+ "epoch": 0.65,
2742
+ "learning_rate": 3.535932968095392e-06,
2743
+ "loss": 0.0245,
2744
+ "step": 40716
2745
+ },
2746
+ {
2747
+ "epoch": 0.65,
2748
+ "learning_rate": 3.517241379310345e-06,
2749
+ "loss": 0.0244,
2750
+ "step": 40832
2751
+ },
2752
+ {
2753
+ "epoch": 0.65,
2754
+ "learning_rate": 3.4985497905252985e-06,
2755
+ "loss": 0.0248,
2756
+ "step": 40948
2757
+ },
2758
+ {
2759
+ "epoch": 0.66,
2760
+ "learning_rate": 3.4798582017402515e-06,
2761
+ "loss": 0.0247,
2762
+ "step": 41064
2763
+ },
2764
+ {
2765
+ "epoch": 0.66,
2766
+ "learning_rate": 3.461166612955205e-06,
2767
+ "loss": 0.0237,
2768
+ "step": 41180
2769
+ },
2770
+ {
2771
+ "epoch": 0.66,
2772
+ "eval_loss": 0.1159593015909195,
2773
+ "eval_runtime": 782.6245,
2774
+ "eval_samples_per_second": 2.19,
2775
+ "eval_steps_per_second": 0.275,
2776
+ "eval_wer": 8.752960560189475,
2777
+ "step": 41180
2778
+ },
2779
+ {
2780
+ "epoch": 0.66,
2781
+ "learning_rate": 3.442475024170158e-06,
2782
+ "loss": 0.0271,
2783
+ "step": 41296
2784
+ },
2785
+ {
2786
+ "epoch": 0.66,
2787
+ "learning_rate": 3.4237834353851115e-06,
2788
+ "loss": 0.0243,
2789
+ "step": 41412
2790
+ },
2791
+ {
2792
+ "epoch": 0.66,
2793
+ "learning_rate": 3.405091846600065e-06,
2794
+ "loss": 0.0239,
2795
+ "step": 41528
2796
+ },
2797
+ {
2798
+ "epoch": 0.66,
2799
+ "learning_rate": 3.386400257815018e-06,
2800
+ "loss": 0.0262,
2801
+ "step": 41644
2802
+ },
2803
+ {
2804
+ "epoch": 0.67,
2805
+ "learning_rate": 3.3677086690299714e-06,
2806
+ "loss": 0.025,
2807
+ "step": 41760
2808
+ },
2809
+ {
2810
+ "epoch": 0.67,
2811
+ "eval_loss": 0.11735337227582932,
2812
+ "eval_runtime": 784.0887,
2813
+ "eval_samples_per_second": 2.186,
2814
+ "eval_steps_per_second": 0.274,
2815
+ "eval_wer": 9.010400576665637,
2816
+ "step": 41760
2817
+ },
2818
+ {
2819
+ "epoch": 0.67,
2820
+ "learning_rate": 3.3490170802449244e-06,
2821
+ "loss": 0.0267,
2822
+ "step": 41876
2823
+ },
2824
+ {
2825
+ "epoch": 0.67,
2826
+ "learning_rate": 3.3303254914598774e-06,
2827
+ "loss": 0.0211,
2828
+ "step": 41992
2829
+ },
2830
+ {
2831
+ "epoch": 0.67,
2832
+ "learning_rate": 3.3116339026748313e-06,
2833
+ "loss": 0.0213,
2834
+ "step": 42108
2835
+ },
2836
+ {
2837
+ "epoch": 0.67,
2838
+ "learning_rate": 3.2929423138897843e-06,
2839
+ "loss": 0.0229,
2840
+ "step": 42224
2841
+ },
2842
+ {
2843
+ "epoch": 0.68,
2844
+ "learning_rate": 3.2742507251047378e-06,
2845
+ "loss": 0.0222,
2846
+ "step": 42340
2847
+ },
2848
+ {
2849
+ "epoch": 0.68,
2850
+ "eval_loss": 0.12385321408510208,
2851
+ "eval_runtime": 782.3158,
2852
+ "eval_samples_per_second": 2.191,
2853
+ "eval_steps_per_second": 0.275,
2854
+ "eval_wer": 8.794150962825661,
2855
+ "step": 42340
2856
+ },
2857
+ {
2858
+ "epoch": 0.68,
2859
+ "learning_rate": 3.2555591363196908e-06,
2860
+ "loss": 0.0217,
2861
+ "step": 42456
2862
+ },
2863
+ {
2864
+ "epoch": 0.68,
2865
+ "learning_rate": 3.236867547534644e-06,
2866
+ "loss": 0.0214,
2867
+ "step": 42572
2868
+ },
2869
+ {
2870
+ "epoch": 0.68,
2871
+ "learning_rate": 3.2181759587495977e-06,
2872
+ "loss": 0.0245,
2873
+ "step": 42688
2874
+ },
2875
+ {
2876
+ "epoch": 0.68,
2877
+ "learning_rate": 3.1994843699645507e-06,
2878
+ "loss": 0.0247,
2879
+ "step": 42804
2880
+ },
2881
+ {
2882
+ "epoch": 0.69,
2883
+ "learning_rate": 3.180792781179504e-06,
2884
+ "loss": 0.0231,
2885
+ "step": 42920
2886
+ },
2887
+ {
2888
+ "epoch": 0.69,
2889
+ "eval_loss": 0.11846602708101273,
2890
+ "eval_runtime": 793.246,
2891
+ "eval_samples_per_second": 2.161,
2892
+ "eval_steps_per_second": 0.271,
2893
+ "eval_wer": 8.299866131191433,
2894
+ "step": 42920
2895
+ },
2896
+ {
2897
+ "epoch": 0.69,
2898
+ "learning_rate": 3.162101192394457e-06,
2899
+ "loss": 0.0237,
2900
+ "step": 43036
2901
+ },
2902
+ {
2903
+ "epoch": 0.69,
2904
+ "learning_rate": 3.14340960360941e-06,
2905
+ "loss": 0.0255,
2906
+ "step": 43152
2907
+ },
2908
+ {
2909
+ "epoch": 0.69,
2910
+ "learning_rate": 3.124718014824364e-06,
2911
+ "loss": 0.0229,
2912
+ "step": 43268
2913
+ },
2914
+ {
2915
+ "epoch": 0.69,
2916
+ "learning_rate": 3.106026426039317e-06,
2917
+ "loss": 0.0255,
2918
+ "step": 43384
2919
+ },
2920
+ {
2921
+ "epoch": 0.69,
2922
+ "learning_rate": 3.0873348372542705e-06,
2923
+ "loss": 0.0241,
2924
+ "step": 43500
2925
+ },
2926
+ {
2927
+ "epoch": 0.69,
2928
+ "eval_loss": 0.11603421717882156,
2929
+ "eval_runtime": 782.7894,
2930
+ "eval_samples_per_second": 2.19,
2931
+ "eval_steps_per_second": 0.275,
2932
+ "eval_wer": 8.474925342395222,
2933
+ "step": 43500
2934
+ },
2935
+ {
2936
+ "epoch": 0.7,
2937
+ "learning_rate": 3.0686432484692235e-06,
2938
+ "loss": 0.023,
2939
+ "step": 43616
2940
+ },
2941
+ {
2942
+ "epoch": 0.7,
2943
+ "learning_rate": 3.0499516596841766e-06,
2944
+ "loss": 0.0206,
2945
+ "step": 43732
2946
+ },
2947
+ {
2948
+ "epoch": 0.7,
2949
+ "learning_rate": 3.03126007089913e-06,
2950
+ "loss": 0.0214,
2951
+ "step": 43848
2952
+ },
2953
+ {
2954
+ "epoch": 0.7,
2955
+ "learning_rate": 3.0125684821140835e-06,
2956
+ "loss": 0.0248,
2957
+ "step": 43964
2958
+ },
2959
+ {
2960
+ "epoch": 0.7,
2961
+ "learning_rate": 2.993876893329037e-06,
2962
+ "loss": 0.0222,
2963
+ "step": 44080
2964
+ },
2965
+ {
2966
+ "epoch": 0.7,
2967
+ "eval_loss": 0.11518887430429459,
2968
+ "eval_runtime": 778.913,
2969
+ "eval_samples_per_second": 2.201,
2970
+ "eval_steps_per_second": 0.276,
2971
+ "eval_wer": 8.382246936463805,
2972
+ "step": 44080
2973
+ },
2974
+ {
2975
+ "epoch": 0.71,
2976
+ "learning_rate": 2.97518530454399e-06,
2977
+ "loss": 0.026,
2978
+ "step": 44196
2979
+ },
2980
+ {
2981
+ "epoch": 0.71,
2982
+ "learning_rate": 2.956493715758943e-06,
2983
+ "loss": 0.0238,
2984
+ "step": 44312
2985
+ },
2986
+ {
2987
+ "epoch": 0.71,
2988
+ "learning_rate": 2.9378021269738964e-06,
2989
+ "loss": 0.0225,
2990
+ "step": 44428
2991
+ },
2992
+ {
2993
+ "epoch": 0.71,
2994
+ "learning_rate": 2.9191105381888494e-06,
2995
+ "loss": 0.0223,
2996
+ "step": 44544
2997
+ },
2998
+ {
2999
+ "epoch": 0.71,
3000
+ "learning_rate": 2.9004189494038033e-06,
3001
+ "loss": 0.0217,
3002
+ "step": 44660
3003
+ },
3004
+ {
3005
+ "epoch": 0.71,
3006
+ "eval_loss": 0.11238180845975876,
3007
+ "eval_runtime": 790.4158,
3008
+ "eval_samples_per_second": 2.168,
3009
+ "eval_steps_per_second": 0.272,
3010
+ "eval_wer": 8.629389352280919,
3011
+ "step": 44660
3012
+ },
3013
+ {
3014
+ "epoch": 0.71,
3015
+ "learning_rate": 2.8817273606187563e-06,
3016
+ "loss": 0.022,
3017
+ "step": 44776
3018
+ },
3019
+ {
3020
+ "epoch": 0.72,
3021
+ "learning_rate": 2.8630357718337093e-06,
3022
+ "loss": 0.0229,
3023
+ "step": 44892
3024
+ },
3025
+ {
3026
+ "epoch": 0.72,
3027
+ "learning_rate": 2.8443441830486628e-06,
3028
+ "loss": 0.0195,
3029
+ "step": 45008
3030
+ },
3031
+ {
3032
+ "epoch": 0.72,
3033
+ "learning_rate": 2.825652594263616e-06,
3034
+ "loss": 0.0198,
3035
+ "step": 45124
3036
+ },
3037
+ {
3038
+ "epoch": 0.72,
3039
+ "learning_rate": 2.8069610054785697e-06,
3040
+ "loss": 0.0212,
3041
+ "step": 45240
3042
+ },
3043
+ {
3044
+ "epoch": 0.72,
3045
+ "eval_loss": 0.11343366652727127,
3046
+ "eval_runtime": 788.7719,
3047
+ "eval_samples_per_second": 2.173,
3048
+ "eval_steps_per_second": 0.273,
3049
+ "eval_wer": 8.299866131191433,
3050
+ "step": 45240
3051
+ },
3052
+ {
3053
+ "epoch": 0.72,
3054
+ "learning_rate": 2.7882694166935227e-06,
3055
+ "loss": 0.0216,
3056
+ "step": 45356
3057
+ },
3058
+ {
3059
+ "epoch": 0.73,
3060
+ "learning_rate": 2.7695778279084757e-06,
3061
+ "loss": 0.0214,
3062
+ "step": 45472
3063
+ },
3064
+ {
3065
+ "epoch": 0.73,
3066
+ "learning_rate": 2.750886239123429e-06,
3067
+ "loss": 0.0199,
3068
+ "step": 45588
3069
+ },
3070
+ {
3071
+ "epoch": 0.73,
3072
+ "learning_rate": 2.732194650338382e-06,
3073
+ "loss": 0.022,
3074
+ "step": 45704
3075
+ },
3076
+ {
3077
+ "epoch": 0.73,
3078
+ "learning_rate": 2.713503061553336e-06,
3079
+ "loss": 0.019,
3080
+ "step": 45820
3081
+ },
3082
+ {
3083
+ "epoch": 0.73,
3084
+ "eval_loss": 0.11607277393341064,
3085
+ "eval_runtime": 785.504,
3086
+ "eval_samples_per_second": 2.182,
3087
+ "eval_steps_per_second": 0.274,
3088
+ "eval_wer": 8.196890124600968,
3089
+ "step": 45820
3090
+ },
3091
+ {
3092
+ "epoch": 0.73,
3093
+ "learning_rate": 2.694811472768289e-06,
3094
+ "loss": 0.0222,
3095
+ "step": 45936
3096
+ },
3097
+ {
3098
+ "epoch": 0.74,
3099
+ "learning_rate": 2.6761198839832425e-06,
3100
+ "loss": 0.0205,
3101
+ "step": 46052
3102
+ },
3103
+ {
3104
+ "epoch": 0.74,
3105
+ "learning_rate": 2.6574282951981955e-06,
3106
+ "loss": 0.0217,
3107
+ "step": 46168
3108
+ },
3109
+ {
3110
+ "epoch": 0.74,
3111
+ "learning_rate": 2.6387367064131486e-06,
3112
+ "loss": 0.0222,
3113
+ "step": 46284
3114
+ },
3115
+ {
3116
+ "epoch": 0.74,
3117
+ "learning_rate": 2.620045117628102e-06,
3118
+ "loss": 0.0198,
3119
+ "step": 46400
3120
+ },
3121
+ {
3122
+ "epoch": 0.74,
3123
+ "eval_loss": 0.11315659433603287,
3124
+ "eval_runtime": 783.5623,
3125
+ "eval_samples_per_second": 2.187,
3126
+ "eval_steps_per_second": 0.274,
3127
+ "eval_wer": 8.413139738440943,
3128
+ "step": 46400
3129
+ },
3130
+ {
3131
+ "epoch": 0.74,
3132
+ "learning_rate": 2.6013535288430555e-06,
3133
+ "loss": 0.0225,
3134
+ "step": 46516
3135
+ },
3136
+ {
3137
+ "epoch": 0.74,
3138
+ "learning_rate": 2.582661940058009e-06,
3139
+ "loss": 0.0234,
3140
+ "step": 46632
3141
+ },
3142
+ {
3143
+ "epoch": 0.75,
3144
+ "learning_rate": 2.563970351272962e-06,
3145
+ "loss": 0.0225,
3146
+ "step": 46748
3147
+ },
3148
+ {
3149
+ "epoch": 0.75,
3150
+ "learning_rate": 2.545278762487915e-06,
3151
+ "loss": 0.0194,
3152
+ "step": 46864
3153
+ },
3154
+ {
3155
+ "epoch": 0.75,
3156
+ "learning_rate": 2.5265871737028684e-06,
3157
+ "loss": 0.0239,
3158
+ "step": 46980
3159
+ },
3160
+ {
3161
+ "epoch": 0.75,
3162
+ "eval_loss": 0.11144877225160599,
3163
+ "eval_runtime": 793.7955,
3164
+ "eval_samples_per_second": 2.159,
3165
+ "eval_steps_per_second": 0.271,
3166
+ "eval_wer": 8.10421171866955,
3167
+ "step": 46980
3168
+ },
3169
+ {
3170
+ "epoch": 0.75,
3171
+ "learning_rate": 2.5078955849178214e-06,
3172
+ "loss": 0.0188,
3173
+ "step": 47096
3174
+ },
3175
+ {
3176
+ "epoch": 0.75,
3177
+ "learning_rate": 2.489203996132775e-06,
3178
+ "loss": 0.0215,
3179
+ "step": 47212
3180
+ },
3181
+ {
3182
+ "epoch": 0.76,
3183
+ "learning_rate": 2.4705124073477283e-06,
3184
+ "loss": 0.0192,
3185
+ "step": 47328
3186
+ },
3187
+ {
3188
+ "epoch": 0.76,
3189
+ "learning_rate": 2.4518208185626818e-06,
3190
+ "loss": 0.0209,
3191
+ "step": 47444
3192
+ },
3193
+ {
3194
+ "epoch": 0.76,
3195
+ "learning_rate": 2.4331292297776348e-06,
3196
+ "loss": 0.0195,
3197
+ "step": 47560
3198
+ },
3199
+ {
3200
+ "epoch": 0.76,
3201
+ "eval_loss": 0.11171752959489822,
3202
+ "eval_runtime": 787.1503,
3203
+ "eval_samples_per_second": 2.177,
3204
+ "eval_steps_per_second": 0.273,
3205
+ "eval_wer": 8.227782926578108,
3206
+ "step": 47560
3207
+ },
3208
+ {
3209
+ "epoch": 0.76,
3210
+ "learning_rate": 2.414437640992588e-06,
3211
+ "loss": 0.0193,
3212
+ "step": 47676
3213
+ },
3214
+ {
3215
+ "epoch": 0.76,
3216
+ "learning_rate": 2.3957460522075413e-06,
3217
+ "loss": 0.0177,
3218
+ "step": 47792
3219
+ },
3220
+ {
3221
+ "epoch": 0.76,
3222
+ "learning_rate": 2.3770544634224947e-06,
3223
+ "loss": 0.0194,
3224
+ "step": 47908
3225
+ },
3226
+ {
3227
+ "epoch": 0.77,
3228
+ "learning_rate": 2.3583628746374477e-06,
3229
+ "loss": 0.0192,
3230
+ "step": 48024
3231
+ },
3232
+ {
3233
+ "epoch": 0.77,
3234
+ "learning_rate": 2.339671285852401e-06,
3235
+ "loss": 0.0208,
3236
+ "step": 48140
3237
+ },
3238
+ {
3239
+ "epoch": 0.77,
3240
+ "eval_loss": 0.10952310264110565,
3241
+ "eval_runtime": 788.6548,
3242
+ "eval_samples_per_second": 2.173,
3243
+ "eval_steps_per_second": 0.273,
3244
+ "eval_wer": 8.042426114715271,
3245
+ "step": 48140
3246
+ },
3247
+ {
3248
+ "epoch": 0.77,
3249
+ "learning_rate": 2.320979697067354e-06,
3250
+ "loss": 0.0197,
3251
+ "step": 48256
3252
+ },
3253
+ {
3254
+ "epoch": 0.77,
3255
+ "learning_rate": 2.3022881082823076e-06,
3256
+ "loss": 0.0192,
3257
+ "step": 48372
3258
+ },
3259
+ {
3260
+ "epoch": 0.77,
3261
+ "learning_rate": 2.283757653883339e-06,
3262
+ "loss": 0.0178,
3263
+ "step": 48488
3264
+ },
3265
+ {
3266
+ "epoch": 0.78,
3267
+ "learning_rate": 2.2650660650982924e-06,
3268
+ "loss": 0.0186,
3269
+ "step": 48604
3270
+ },
3271
+ {
3272
+ "epoch": 0.78,
3273
+ "learning_rate": 2.2463744763132454e-06,
3274
+ "loss": 0.0201,
3275
+ "step": 48720
3276
+ },
3277
+ {
3278
+ "epoch": 0.78,
3279
+ "eval_loss": 0.10950493812561035,
3280
+ "eval_runtime": 783.1853,
3281
+ "eval_samples_per_second": 2.188,
3282
+ "eval_steps_per_second": 0.275,
3283
+ "eval_wer": 7.867366903511481,
3284
+ "step": 48720
3285
+ },
3286
+ {
3287
+ "epoch": 0.78,
3288
+ "learning_rate": 2.2276828875281985e-06,
3289
+ "loss": 0.0191,
3290
+ "step": 48836
3291
+ },
3292
+ {
3293
+ "epoch": 0.78,
3294
+ "learning_rate": 2.208991298743152e-06,
3295
+ "loss": 0.0194,
3296
+ "step": 48952
3297
+ },
3298
+ {
3299
+ "epoch": 0.78,
3300
+ "learning_rate": 2.1902997099581054e-06,
3301
+ "loss": 0.0204,
3302
+ "step": 49068
3303
+ },
3304
+ {
3305
+ "epoch": 0.79,
3306
+ "learning_rate": 2.1716081211730584e-06,
3307
+ "loss": 0.0205,
3308
+ "step": 49184
3309
+ },
3310
+ {
3311
+ "epoch": 0.79,
3312
+ "learning_rate": 2.152916532388012e-06,
3313
+ "loss": 0.0191,
3314
+ "step": 49300
3315
+ },
3316
+ {
3317
+ "epoch": 0.79,
3318
+ "eval_loss": 0.11016014218330383,
3319
+ "eval_runtime": 787.2967,
3320
+ "eval_samples_per_second": 2.177,
3321
+ "eval_steps_per_second": 0.273,
3322
+ "eval_wer": 8.052723715374317,
3323
+ "step": 49300
3324
+ },
3325
+ {
3326
+ "epoch": 0.79,
3327
+ "learning_rate": 2.134224943602965e-06,
3328
+ "loss": 0.0186,
3329
+ "step": 49416
3330
+ },
3331
+ {
3332
+ "epoch": 0.79,
3333
+ "learning_rate": 2.1155333548179183e-06,
3334
+ "loss": 0.0177,
3335
+ "step": 49532
3336
+ },
3337
+ {
3338
+ "epoch": 0.79,
3339
+ "learning_rate": 2.0968417660328717e-06,
3340
+ "loss": 0.019,
3341
+ "step": 49648
3342
+ },
3343
+ {
3344
+ "epoch": 0.79,
3345
+ "learning_rate": 2.0781501772478248e-06,
3346
+ "loss": 0.02,
3347
+ "step": 49764
3348
+ },
3349
+ {
3350
+ "epoch": 0.8,
3351
+ "learning_rate": 2.059458588462778e-06,
3352
+ "loss": 0.0192,
3353
+ "step": 49880
3354
+ },
3355
+ {
3356
+ "epoch": 0.8,
3357
+ "eval_loss": 0.10825244337320328,
3358
+ "eval_runtime": 784.2924,
3359
+ "eval_samples_per_second": 2.185,
3360
+ "eval_steps_per_second": 0.274,
3361
+ "eval_wer": 7.908557306147667,
3362
+ "step": 49880
3363
+ },
3364
+ {
3365
+ "epoch": 0.8,
3366
+ "learning_rate": 2.0407669996777312e-06,
3367
+ "loss": 0.0223,
3368
+ "step": 49996
3369
+ },
3370
+ {
3371
+ "epoch": 0.8,
3372
+ "learning_rate": 2.0220754108926847e-06,
3373
+ "loss": 0.0178,
3374
+ "step": 50112
3375
+ },
3376
+ {
3377
+ "epoch": 0.8,
3378
+ "learning_rate": 2.003383822107638e-06,
3379
+ "loss": 0.017,
3380
+ "step": 50228
3381
+ },
3382
+ {
3383
+ "epoch": 0.8,
3384
+ "learning_rate": 1.984692233322591e-06,
3385
+ "loss": 0.0175,
3386
+ "step": 50344
3387
+ },
3388
+ {
3389
+ "epoch": 0.81,
3390
+ "learning_rate": 1.9661617789236225e-06,
3391
+ "loss": 0.0201,
3392
+ "step": 50460
3393
+ },
3394
+ {
3395
+ "epoch": 0.81,
3396
+ "eval_loss": 0.10784495621919632,
3397
+ "eval_runtime": 782.5124,
3398
+ "eval_samples_per_second": 2.19,
3399
+ "eval_steps_per_second": 0.275,
3400
+ "eval_wer": 7.805581299557203,
3401
+ "step": 50460
3402
+ },
3403
+ {
3404
+ "epoch": 0.81,
3405
+ "learning_rate": 1.9474701901385755e-06,
3406
+ "loss": 0.0184,
3407
+ "step": 50576
3408
+ },
3409
+ {
3410
+ "epoch": 0.81,
3411
+ "learning_rate": 1.928778601353529e-06,
3412
+ "loss": 0.0175,
3413
+ "step": 50692
3414
+ },
3415
+ {
3416
+ "epoch": 0.81,
3417
+ "learning_rate": 1.9100870125684824e-06,
3418
+ "loss": 0.0191,
3419
+ "step": 50808
3420
+ },
3421
+ {
3422
+ "epoch": 0.81,
3423
+ "learning_rate": 1.8913954237834356e-06,
3424
+ "loss": 0.0169,
3425
+ "step": 50924
3426
+ },
3427
+ {
3428
+ "epoch": 0.81,
3429
+ "learning_rate": 1.8727038349983889e-06,
3430
+ "loss": 0.0186,
3431
+ "step": 51040
3432
+ },
3433
+ {
3434
+ "epoch": 0.81,
3435
+ "eval_loss": 0.10628069937229156,
3436
+ "eval_runtime": 794.0691,
3437
+ "eval_samples_per_second": 2.159,
3438
+ "eval_steps_per_second": 0.271,
3439
+ "eval_wer": 7.527546081762949,
3440
+ "step": 51040
3441
+ },
3442
+ {
3443
+ "epoch": 0.82,
3444
+ "learning_rate": 1.8540122462133419e-06,
3445
+ "loss": 0.0172,
3446
+ "step": 51156
3447
+ },
3448
+ {
3449
+ "epoch": 0.82,
3450
+ "learning_rate": 1.8353206574282953e-06,
3451
+ "loss": 0.0182,
3452
+ "step": 51272
3453
+ },
3454
+ {
3455
+ "epoch": 0.82,
3456
+ "learning_rate": 1.8166290686432486e-06,
3457
+ "loss": 0.0175,
3458
+ "step": 51388
3459
+ },
3460
+ {
3461
+ "epoch": 0.82,
3462
+ "learning_rate": 1.797937479858202e-06,
3463
+ "loss": 0.0181,
3464
+ "step": 51504
3465
+ },
3466
+ {
3467
+ "epoch": 0.82,
3468
+ "learning_rate": 1.7792458910731552e-06,
3469
+ "loss": 0.0172,
3470
+ "step": 51620
3471
+ },
3472
+ {
3473
+ "epoch": 0.82,
3474
+ "eval_loss": 0.10713626444339752,
3475
+ "eval_runtime": 787.3756,
3476
+ "eval_samples_per_second": 2.177,
3477
+ "eval_steps_per_second": 0.273,
3478
+ "eval_wer": 7.898259705488621,
3479
+ "step": 51620
3480
+ },
3481
+ {
3482
+ "epoch": 0.83,
3483
+ "learning_rate": 1.7605543022881083e-06,
3484
+ "loss": 0.0165,
3485
+ "step": 51736
3486
+ },
3487
+ {
3488
+ "epoch": 0.83,
3489
+ "learning_rate": 1.7418627135030617e-06,
3490
+ "loss": 0.0189,
3491
+ "step": 51852
3492
+ },
3493
+ {
3494
+ "epoch": 0.83,
3495
+ "learning_rate": 1.723171124718015e-06,
3496
+ "loss": 0.0182,
3497
+ "step": 51968
3498
+ },
3499
+ {
3500
+ "epoch": 0.83,
3501
+ "learning_rate": 1.7044795359329682e-06,
3502
+ "loss": 0.0171,
3503
+ "step": 52084
3504
+ },
3505
+ {
3506
+ "epoch": 0.83,
3507
+ "learning_rate": 1.6857879471479216e-06,
3508
+ "loss": 0.0163,
3509
+ "step": 52200
3510
+ },
3511
+ {
3512
+ "epoch": 0.83,
3513
+ "eval_loss": 0.10758110135793686,
3514
+ "eval_runtime": 786.0519,
3515
+ "eval_samples_per_second": 2.181,
3516
+ "eval_steps_per_second": 0.274,
3517
+ "eval_wer": 7.64081968901246,
3518
+ "step": 52200
3519
+ },
3520
+ {
3521
+ "epoch": 0.84,
3522
+ "learning_rate": 1.6670963583628746e-06,
3523
+ "loss": 0.018,
3524
+ "step": 52316
3525
+ },
3526
+ {
3527
+ "epoch": 0.84,
3528
+ "learning_rate": 1.6484047695778279e-06,
3529
+ "loss": 0.0169,
3530
+ "step": 52432
3531
+ },
3532
+ {
3533
+ "epoch": 0.84,
3534
+ "learning_rate": 1.6297131807927813e-06,
3535
+ "loss": 0.0172,
3536
+ "step": 52548
3537
+ },
3538
+ {
3539
+ "epoch": 0.84,
3540
+ "learning_rate": 1.6110215920077346e-06,
3541
+ "loss": 0.0146,
3542
+ "step": 52664
3543
+ },
3544
+ {
3545
+ "epoch": 0.84,
3546
+ "learning_rate": 1.592330003222688e-06,
3547
+ "loss": 0.0161,
3548
+ "step": 52780
3549
+ },
3550
+ {
3551
+ "epoch": 0.84,
3552
+ "eval_loss": 0.10708160698413849,
3553
+ "eval_runtime": 786.9909,
3554
+ "eval_samples_per_second": 2.178,
3555
+ "eval_steps_per_second": 0.273,
3556
+ "eval_wer": 7.92915250746576,
3557
+ "step": 52780
3558
+ },
3559
+ {
3560
+ "epoch": 0.84,
3561
+ "learning_rate": 1.5736384144376412e-06,
3562
+ "loss": 0.0168,
3563
+ "step": 52896
3564
+ },
3565
+ {
3566
+ "epoch": 0.85,
3567
+ "learning_rate": 1.5551079600386724e-06,
3568
+ "loss": 0.0166,
3569
+ "step": 53012
3570
+ },
3571
+ {
3572
+ "epoch": 0.85,
3573
+ "learning_rate": 1.5364163712536256e-06,
3574
+ "loss": 0.0162,
3575
+ "step": 53128
3576
+ },
3577
+ {
3578
+ "epoch": 0.85,
3579
+ "learning_rate": 1.517724782468579e-06,
3580
+ "loss": 0.0176,
3581
+ "step": 53244
3582
+ },
3583
+ {
3584
+ "epoch": 0.85,
3585
+ "learning_rate": 1.4990331936835323e-06,
3586
+ "loss": 0.0189,
3587
+ "step": 53360
3588
+ },
3589
+ {
3590
+ "epoch": 0.85,
3591
+ "eval_loss": 0.1049240455031395,
3592
+ "eval_runtime": 784.1212,
3593
+ "eval_samples_per_second": 2.186,
3594
+ "eval_steps_per_second": 0.274,
3595
+ "eval_wer": 7.589331685717228,
3596
+ "step": 53360
3597
+ },
3598
+ {
3599
+ "epoch": 0.85,
3600
+ "learning_rate": 1.4803416048984853e-06,
3601
+ "loss": 0.0152,
3602
+ "step": 53476
3603
+ },
3604
+ {
3605
+ "epoch": 0.86,
3606
+ "learning_rate": 1.4616500161134387e-06,
3607
+ "loss": 0.0193,
3608
+ "step": 53592
3609
+ },
3610
+ {
3611
+ "epoch": 0.86,
3612
+ "learning_rate": 1.442958427328392e-06,
3613
+ "loss": 0.0165,
3614
+ "step": 53708
3615
+ },
3616
+ {
3617
+ "epoch": 0.86,
3618
+ "learning_rate": 1.4242668385433452e-06,
3619
+ "loss": 0.0175,
3620
+ "step": 53824
3621
+ },
3622
+ {
3623
+ "epoch": 0.86,
3624
+ "learning_rate": 1.4055752497582987e-06,
3625
+ "loss": 0.0167,
3626
+ "step": 53940
3627
+ },
3628
+ {
3629
+ "epoch": 0.86,
3630
+ "eval_loss": 0.10481404513120651,
3631
+ "eval_runtime": 784.2538,
3632
+ "eval_samples_per_second": 2.186,
3633
+ "eval_steps_per_second": 0.274,
3634
+ "eval_wer": 7.568736484399135,
3635
+ "step": 53940
3636
+ },
3637
+ {
3638
+ "epoch": 0.86,
3639
+ "learning_rate": 1.386883660973252e-06,
3640
+ "loss": 0.0138,
3641
+ "step": 54056
3642
+ },
3643
+ {
3644
+ "epoch": 0.86,
3645
+ "learning_rate": 1.368192072188205e-06,
3646
+ "loss": 0.0183,
3647
+ "step": 54172
3648
+ },
3649
+ {
3650
+ "epoch": 0.87,
3651
+ "learning_rate": 1.3495004834031584e-06,
3652
+ "loss": 0.0158,
3653
+ "step": 54288
3654
+ },
3655
+ {
3656
+ "epoch": 0.87,
3657
+ "learning_rate": 1.3308088946181116e-06,
3658
+ "loss": 0.018,
3659
+ "step": 54404
3660
+ },
3661
+ {
3662
+ "epoch": 0.87,
3663
+ "learning_rate": 1.312117305833065e-06,
3664
+ "loss": 0.016,
3665
+ "step": 54520
3666
+ },
3667
+ {
3668
+ "epoch": 0.87,
3669
+ "eval_loss": 0.10257178544998169,
3670
+ "eval_runtime": 779.3466,
3671
+ "eval_samples_per_second": 2.199,
3672
+ "eval_steps_per_second": 0.276,
3673
+ "eval_wer": 7.537843682421997,
3674
+ "step": 54520
3675
+ },
3676
+ {
3677
+ "epoch": 0.87,
3678
+ "learning_rate": 1.2934257170480183e-06,
3679
+ "loss": 0.0182,
3680
+ "step": 54636
3681
+ },
3682
+ {
3683
+ "epoch": 0.87,
3684
+ "learning_rate": 1.2747341282629713e-06,
3685
+ "loss": 0.0167,
3686
+ "step": 54752
3687
+ },
3688
+ {
3689
+ "epoch": 0.88,
3690
+ "learning_rate": 1.2560425394779247e-06,
3691
+ "loss": 0.0182,
3692
+ "step": 54868
3693
+ },
3694
+ {
3695
+ "epoch": 0.88,
3696
+ "learning_rate": 1.237350950692878e-06,
3697
+ "loss": 0.0173,
3698
+ "step": 54984
3699
+ },
3700
+ {
3701
+ "epoch": 0.88,
3702
+ "learning_rate": 1.2186593619078312e-06,
3703
+ "loss": 0.016,
3704
+ "step": 55100
3705
+ },
3706
+ {
3707
+ "epoch": 0.88,
3708
+ "eval_loss": 0.1033649742603302,
3709
+ "eval_runtime": 792.3552,
3710
+ "eval_samples_per_second": 2.163,
3711
+ "eval_steps_per_second": 0.271,
3712
+ "eval_wer": 7.403974873854392,
3713
+ "step": 55100
3714
+ },
3715
+ {
3716
+ "epoch": 0.88,
3717
+ "learning_rate": 1.1999677731227845e-06,
3718
+ "loss": 0.0169,
3719
+ "step": 55216
3720
+ },
3721
+ {
3722
+ "epoch": 0.88,
3723
+ "learning_rate": 1.181276184337738e-06,
3724
+ "loss": 0.0151,
3725
+ "step": 55332
3726
+ },
3727
+ {
3728
+ "epoch": 0.89,
3729
+ "learning_rate": 1.162584595552691e-06,
3730
+ "loss": 0.0167,
3731
+ "step": 55448
3732
+ },
3733
+ {
3734
+ "epoch": 0.89,
3735
+ "learning_rate": 1.1438930067676444e-06,
3736
+ "loss": 0.015,
3737
+ "step": 55564
3738
+ },
3739
+ {
3740
+ "epoch": 0.89,
3741
+ "learning_rate": 1.1252014179825976e-06,
3742
+ "loss": 0.015,
3743
+ "step": 55680
3744
+ },
3745
+ {
3746
+ "epoch": 0.89,
3747
+ "eval_loss": 0.10430463403463364,
3748
+ "eval_runtime": 785.9691,
3749
+ "eval_samples_per_second": 2.181,
3750
+ "eval_steps_per_second": 0.274,
3751
+ "eval_wer": 7.424570075172484,
3752
+ "step": 55680
3753
+ },
3754
+ {
3755
+ "epoch": 0.89,
3756
+ "learning_rate": 1.1065098291975508e-06,
3757
+ "loss": 0.0183,
3758
+ "step": 55796
3759
+ },
3760
+ {
3761
+ "epoch": 0.89,
3762
+ "learning_rate": 1.087818240412504e-06,
3763
+ "loss": 0.0173,
3764
+ "step": 55912
3765
+ },
3766
+ {
3767
+ "epoch": 0.89,
3768
+ "learning_rate": 1.0691266516274573e-06,
3769
+ "loss": 0.015,
3770
+ "step": 56028
3771
+ },
3772
+ {
3773
+ "epoch": 0.9,
3774
+ "learning_rate": 1.0504350628424108e-06,
3775
+ "loss": 0.0156,
3776
+ "step": 56144
3777
+ },
3778
+ {
3779
+ "epoch": 0.9,
3780
+ "learning_rate": 1.031743474057364e-06,
3781
+ "loss": 0.0163,
3782
+ "step": 56260
3783
+ },
3784
+ {
3785
+ "epoch": 0.9,
3786
+ "eval_loss": 0.10278747975826263,
3787
+ "eval_runtime": 787.108,
3788
+ "eval_samples_per_second": 2.178,
3789
+ "eval_steps_per_second": 0.273,
3790
+ "eval_wer": 7.5481412830810415,
3791
+ "step": 56260
3792
+ },
3793
+ {
3794
+ "epoch": 0.9,
3795
+ "learning_rate": 1.0130518852723172e-06,
3796
+ "loss": 0.0176,
3797
+ "step": 56376
3798
+ },
3799
+ {
3800
+ "epoch": 0.9,
3801
+ "learning_rate": 9.943602964872705e-07,
3802
+ "loss": 0.0157,
3803
+ "step": 56492
3804
+ },
3805
+ {
3806
+ "epoch": 0.9,
3807
+ "learning_rate": 9.75668707702224e-07,
3808
+ "loss": 0.0138,
3809
+ "step": 56608
3810
+ },
3811
+ {
3812
+ "epoch": 0.91,
3813
+ "learning_rate": 9.56977118917177e-07,
3814
+ "loss": 0.015,
3815
+ "step": 56724
3816
+ },
3817
+ {
3818
+ "epoch": 0.91,
3819
+ "learning_rate": 9.382855301321303e-07,
3820
+ "loss": 0.0154,
3821
+ "step": 56840
3822
+ },
3823
+ {
3824
+ "epoch": 0.91,
3825
+ "eval_loss": 0.1029290109872818,
3826
+ "eval_runtime": 783.9469,
3827
+ "eval_samples_per_second": 2.186,
3828
+ "eval_steps_per_second": 0.274,
3829
+ "eval_wer": 7.455462877149624,
3830
+ "step": 56840
3831
+ },
3832
+ {
3833
+ "epoch": 0.91,
3834
+ "learning_rate": 9.197550757331615e-07,
3835
+ "loss": 0.017,
3836
+ "step": 56956
3837
+ },
3838
+ {
3839
+ "epoch": 0.91,
3840
+ "learning_rate": 9.010634869481148e-07,
3841
+ "loss": 0.0156,
3842
+ "step": 57072
3843
+ },
3844
+ {
3845
+ "epoch": 0.91,
3846
+ "learning_rate": 8.82371898163068e-07,
3847
+ "loss": 0.016,
3848
+ "step": 57188
3849
+ },
3850
+ {
3851
+ "epoch": 0.91,
3852
+ "learning_rate": 8.636803093780213e-07,
3853
+ "loss": 0.0144,
3854
+ "step": 57304
3855
+ },
3856
+ {
3857
+ "epoch": 0.92,
3858
+ "learning_rate": 8.449887205929746e-07,
3859
+ "loss": 0.0176,
3860
+ "step": 57420
3861
+ },
3862
+ {
3863
+ "epoch": 0.92,
3864
+ "eval_loss": 0.1018747016787529,
3865
+ "eval_runtime": 783.2187,
3866
+ "eval_samples_per_second": 2.188,
3867
+ "eval_steps_per_second": 0.275,
3868
+ "eval_wer": 7.49665327978581,
3869
+ "step": 57420
3870
+ },
3871
+ {
3872
+ "epoch": 0.92,
3873
+ "learning_rate": 8.262971318079279e-07,
3874
+ "loss": 0.018,
3875
+ "step": 57536
3876
+ },
3877
+ {
3878
+ "epoch": 0.92,
3879
+ "learning_rate": 8.076055430228811e-07,
3880
+ "loss": 0.0162,
3881
+ "step": 57652
3882
+ },
3883
+ {
3884
+ "epoch": 0.92,
3885
+ "learning_rate": 7.889139542378344e-07,
3886
+ "loss": 0.0146,
3887
+ "step": 57768
3888
+ },
3889
+ {
3890
+ "epoch": 0.92,
3891
+ "learning_rate": 7.702223654527877e-07,
3892
+ "loss": 0.0133,
3893
+ "step": 57884
3894
+ },
3895
+ {
3896
+ "epoch": 0.93,
3897
+ "learning_rate": 7.51530776667741e-07,
3898
+ "loss": 0.0167,
3899
+ "step": 58000
3900
+ },
3901
+ {
3902
+ "epoch": 0.93,
3903
+ "eval_loss": 0.10182846337556839,
3904
+ "eval_runtime": 789.1997,
3905
+ "eval_samples_per_second": 2.172,
3906
+ "eval_steps_per_second": 0.272,
3907
+ "eval_wer": 7.445165276490577,
3908
+ "step": 58000
3909
+ },
3910
+ {
3911
+ "epoch": 0.93,
3912
+ "learning_rate": 7.328391878826942e-07,
3913
+ "loss": 0.0164,
3914
+ "step": 58116
3915
+ },
3916
+ {
3917
+ "epoch": 0.93,
3918
+ "learning_rate": 7.141475990976475e-07,
3919
+ "loss": 0.0151,
3920
+ "step": 58232
3921
+ },
3922
+ {
3923
+ "epoch": 0.93,
3924
+ "learning_rate": 6.954560103126008e-07,
3925
+ "loss": 0.0136,
3926
+ "step": 58348
3927
+ },
3928
+ {
3929
+ "epoch": 0.93,
3930
+ "learning_rate": 6.76764421527554e-07,
3931
+ "loss": 0.0136,
3932
+ "step": 58464
3933
+ },
3934
+ {
3935
+ "epoch": 0.94,
3936
+ "learning_rate": 6.580728327425073e-07,
3937
+ "loss": 0.0163,
3938
+ "step": 58580
3939
+ },
3940
+ {
3941
+ "epoch": 0.94,
3942
+ "eval_loss": 0.1013648584485054,
3943
+ "eval_runtime": 785.0865,
3944
+ "eval_samples_per_second": 2.183,
3945
+ "eval_steps_per_second": 0.274,
3946
+ "eval_wer": 7.445165276490577,
3947
+ "step": 58580
3948
+ },
3949
+ {
3950
+ "epoch": 0.94,
3951
+ "learning_rate": 6.393812439574605e-07,
3952
+ "loss": 0.014,
3953
+ "step": 58696
3954
+ },
3955
+ {
3956
+ "epoch": 0.94,
3957
+ "learning_rate": 6.206896551724139e-07,
3958
+ "loss": 0.0144,
3959
+ "step": 58812
3960
+ },
3961
+ {
3962
+ "epoch": 0.94,
3963
+ "learning_rate": 6.019980663873671e-07,
3964
+ "loss": 0.0147,
3965
+ "step": 58928
3966
+ },
3967
+ {
3968
+ "epoch": 0.94,
3969
+ "learning_rate": 5.834676119883983e-07,
3970
+ "loss": 0.013,
3971
+ "step": 59044
3972
+ },
3973
+ {
3974
+ "epoch": 0.94,
3975
+ "learning_rate": 5.647760232033517e-07,
3976
+ "loss": 0.0132,
3977
+ "step": 59160
3978
+ },
3979
+ {
3980
+ "epoch": 0.94,
3981
+ "eval_loss": 0.10131796449422836,
3982
+ "eval_runtime": 782.9945,
3983
+ "eval_samples_per_second": 2.189,
3984
+ "eval_steps_per_second": 0.275,
3985
+ "eval_wer": 7.136237256719184,
3986
+ "step": 59160
3987
+ },
3988
+ {
3989
+ "epoch": 0.95,
3990
+ "learning_rate": 5.460844344183049e-07,
3991
+ "loss": 0.0125,
3992
+ "step": 59276
3993
+ },
3994
+ {
3995
+ "epoch": 0.95,
3996
+ "learning_rate": 5.273928456332581e-07,
3997
+ "loss": 0.0118,
3998
+ "step": 59392
3999
+ },
4000
+ {
4001
+ "epoch": 0.95,
4002
+ "learning_rate": 5.087012568482115e-07,
4003
+ "loss": 0.0156,
4004
+ "step": 59508
4005
+ },
4006
+ {
4007
+ "epoch": 0.95,
4008
+ "learning_rate": 4.900096680631647e-07,
4009
+ "loss": 0.0164,
4010
+ "step": 59624
4011
+ },
4012
+ {
4013
+ "epoch": 0.95,
4014
+ "learning_rate": 4.7147921366419595e-07,
4015
+ "loss": 0.0143,
4016
+ "step": 59740
4017
+ },
4018
+ {
4019
+ "epoch": 0.95,
4020
+ "eval_loss": 0.10116977989673615,
4021
+ "eval_runtime": 781.1213,
4022
+ "eval_samples_per_second": 2.194,
4023
+ "eval_steps_per_second": 0.275,
4024
+ "eval_wer": 7.383379672536298,
4025
+ "step": 59740
4026
+ },
4027
+ {
4028
+ "epoch": 0.96,
4029
+ "learning_rate": 4.527876248791493e-07,
4030
+ "loss": 0.0128,
4031
+ "step": 59856
4032
+ },
4033
+ {
4034
+ "epoch": 0.96,
4035
+ "learning_rate": 4.340960360941025e-07,
4036
+ "loss": 0.015,
4037
+ "step": 59972
4038
+ },
4039
+ {
4040
+ "epoch": 0.96,
4041
+ "learning_rate": 4.1540444730905576e-07,
4042
+ "loss": 0.0134,
4043
+ "step": 60088
4044
+ },
4045
+ {
4046
+ "epoch": 0.96,
4047
+ "learning_rate": 3.9671285852400905e-07,
4048
+ "loss": 0.0148,
4049
+ "step": 60204
4050
+ },
4051
+ {
4052
+ "epoch": 0.96,
4053
+ "learning_rate": 3.780212697389624e-07,
4054
+ "loss": 0.0159,
4055
+ "step": 60320
4056
+ },
4057
+ {
4058
+ "epoch": 0.96,
4059
+ "eval_loss": 0.10068144649267197,
4060
+ "eval_runtime": 785.2541,
4061
+ "eval_samples_per_second": 2.183,
4062
+ "eval_steps_per_second": 0.274,
4063
+ "eval_wer": 7.311296467922975,
4064
+ "step": 60320
4065
+ },
4066
+ {
4067
+ "epoch": 0.96,
4068
+ "learning_rate": 3.593296809539156e-07,
4069
+ "loss": 0.0145,
4070
+ "step": 60436
4071
+ },
4072
+ {
4073
+ "epoch": 0.97,
4074
+ "learning_rate": 3.4063809216886885e-07,
4075
+ "loss": 0.0149,
4076
+ "step": 60552
4077
+ },
4078
+ {
4079
+ "epoch": 0.97,
4080
+ "learning_rate": 3.2194650338382214e-07,
4081
+ "loss": 0.0158,
4082
+ "step": 60668
4083
+ },
4084
+ {
4085
+ "epoch": 0.97,
4086
+ "learning_rate": 3.0325491459877543e-07,
4087
+ "loss": 0.0161,
4088
+ "step": 60784
4089
+ },
4090
+ {
4091
+ "epoch": 0.97,
4092
+ "learning_rate": 2.8456332581372866e-07,
4093
+ "loss": 0.0127,
4094
+ "step": 60900
4095
+ },
4096
+ {
4097
+ "epoch": 0.97,
4098
+ "eval_loss": 0.10017982870340347,
4099
+ "eval_runtime": 783.6716,
4100
+ "eval_samples_per_second": 2.187,
4101
+ "eval_steps_per_second": 0.274,
4102
+ "eval_wer": 7.290701266604881,
4103
+ "step": 60900
4104
+ },
4105
+ {
4106
+ "epoch": 0.97,
4107
+ "learning_rate": 2.6587173702868195e-07,
4108
+ "loss": 0.0146,
4109
+ "step": 61016
4110
+ },
4111
+ {
4112
+ "epoch": 0.98,
4113
+ "learning_rate": 2.471801482436352e-07,
4114
+ "loss": 0.0155,
4115
+ "step": 61132
4116
+ },
4117
+ {
4118
+ "epoch": 0.98,
4119
+ "learning_rate": 2.284885594585885e-07,
4120
+ "loss": 0.0125,
4121
+ "step": 61248
4122
+ },
4123
+ {
4124
+ "epoch": 0.98,
4125
+ "learning_rate": 2.0979697067354173e-07,
4126
+ "loss": 0.0161,
4127
+ "step": 61364
4128
+ },
4129
+ {
4130
+ "epoch": 0.98,
4131
+ "learning_rate": 1.9110538188849505e-07,
4132
+ "loss": 0.0134,
4133
+ "step": 61480
4134
+ },
4135
+ {
4136
+ "epoch": 0.98,
4137
+ "eval_loss": 0.10023297369480133,
4138
+ "eval_runtime": 783.3354,
4139
+ "eval_samples_per_second": 2.188,
4140
+ "eval_steps_per_second": 0.274,
4141
+ "eval_wer": 7.249510863968696,
4142
+ "step": 61480
4143
+ },
4144
+ {
4145
+ "epoch": 0.98,
4146
+ "learning_rate": 1.7241379310344828e-07,
4147
+ "loss": 0.0153,
4148
+ "step": 61596
4149
+ },
4150
+ {
4151
+ "epoch": 0.99,
4152
+ "learning_rate": 1.5372220431840157e-07,
4153
+ "loss": 0.0154,
4154
+ "step": 61712
4155
+ },
4156
+ {
4157
+ "epoch": 0.99,
4158
+ "learning_rate": 1.3503061553335483e-07,
4159
+ "loss": 0.013,
4160
+ "step": 61828
4161
+ },
4162
+ {
4163
+ "epoch": 0.99,
4164
+ "learning_rate": 1.163390267483081e-07,
4165
+ "loss": 0.0135,
4166
+ "step": 61944
4167
+ },
4168
+ {
4169
+ "epoch": 0.99,
4170
+ "learning_rate": 9.764743796326138e-08,
4171
+ "loss": 0.0147,
4172
+ "step": 62060
4173
+ },
4174
+ {
4175
+ "epoch": 0.99,
4176
+ "eval_loss": 0.10011597722768784,
4177
+ "eval_runtime": 799.7654,
4178
+ "eval_samples_per_second": 2.143,
4179
+ "eval_steps_per_second": 0.269,
4180
+ "eval_wer": 7.300998867263927,
4181
+ "step": 62060
4182
+ },
4183
+ {
4184
+ "epoch": 0.99,
4185
+ "learning_rate": 7.895584917821464e-08,
4186
+ "loss": 0.0146,
4187
+ "step": 62176
4188
+ },
4189
+ {
4190
+ "epoch": 0.99,
4191
+ "learning_rate": 6.02642603931679e-08,
4192
+ "loss": 0.0151,
4193
+ "step": 62292
4194
+ },
4195
+ {
4196
+ "epoch": 1.0,
4197
+ "learning_rate": 4.157267160812117e-08,
4198
+ "loss": 0.0136,
4199
+ "step": 62408
4200
+ },
4201
+ {
4202
+ "epoch": 1.0,
4203
+ "learning_rate": 2.2881082823074446e-08,
4204
+ "loss": 0.0123,
4205
+ "step": 62524
4206
+ },
4207
+ {
4208
+ "epoch": 1.0,
4209
+ "learning_rate": 4.189494038027715e-09,
4210
+ "loss": 0.0144,
4211
+ "step": 62640
4212
+ },
4213
+ {
4214
+ "epoch": 1.0,
4215
+ "eval_loss": 0.09998083859682083,
4216
+ "eval_runtime": 786.9298,
4217
+ "eval_samples_per_second": 2.178,
4218
+ "eval_steps_per_second": 0.273,
4219
+ "eval_wer": 7.280403665945835,
4220
+ "step": 62640
4221
+ },
4222
+ {
4223
+ "epoch": 1.0,
4224
+ "step": 62640,
4225
+ "total_flos": 2.1279417860837375e+21,
4226
+ "train_loss": 0.051935346220949447,
4227
+ "train_runtime": 527672.5015,
4228
+ "train_samples_per_second": 1.899,
4229
+ "train_steps_per_second": 0.119
4230
+ }
4231
+ ],
4232
+ "max_steps": 62640,
4233
+ "num_train_epochs": 9223372036854775807,
4234
+ "total_flos": 2.1279417860837375e+21,
4235
+ "trial_name": null,
4236
+ "trial_params": null
4237
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:28a2317acd84750ea8e7542b24100a1aebbf222a34bdbce509682a9ec011396d
3
+ size 3631
vocab.json ADDED
The diff for this file is too large to render. See raw diff