yuxiang1990 commited on
Commit
93375d7
1 Parent(s): 14a4922

Upload 9 files

Browse files
config.json ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "/data1/CACHE/huggingface/hg_training/base",
3
+ "activation_dropout": 0.0,
4
+ "activation_function": "gelu",
5
+ "apply_spec_augment": false,
6
+ "architectures": [
7
+ "WhisperForConditionalGeneration"
8
+ ],
9
+ "attention_dropout": 0.0,
10
+ "begin_suppress_tokens": [
11
+ 220,
12
+ 50257
13
+ ],
14
+ "bos_token_id": 50257,
15
+ "classifier_proj_size": 256,
16
+ "d_model": 512,
17
+ "decoder_attention_heads": 8,
18
+ "decoder_ffn_dim": 2048,
19
+ "decoder_layerdrop": 0.0,
20
+ "decoder_layers": 6,
21
+ "decoder_start_token_id": 50258,
22
+ "dropout": 0.0,
23
+ "encoder_attention_heads": 8,
24
+ "encoder_ffn_dim": 2048,
25
+ "encoder_layerdrop": 0.0,
26
+ "encoder_layers": 6,
27
+ "eos_token_id": 50257,
28
+ "forced_decoder_ids": [
29
+ [
30
+ 1,
31
+ 50260
32
+ ],
33
+ [
34
+ 2,
35
+ 50359
36
+ ],
37
+ [
38
+ 3,
39
+ 50363
40
+ ]
41
+ ],
42
+ "init_std": 0.02,
43
+ "is_encoder_decoder": true,
44
+ "mask_feature_length": 10,
45
+ "mask_feature_min_masks": 0,
46
+ "mask_feature_prob": 0.0,
47
+ "mask_time_length": 10,
48
+ "mask_time_min_masks": 2,
49
+ "mask_time_prob": 0.05,
50
+ "max_length": 448,
51
+ "max_source_positions": 1500,
52
+ "max_target_positions": 448,
53
+ "median_filter_width": 7,
54
+ "model_type": "whisper",
55
+ "num_hidden_layers": 6,
56
+ "num_mel_bins": 80,
57
+ "pad_token_id": 50257,
58
+ "scale_embedding": false,
59
+ "suppress_tokens": [],
60
+ "torch_dtype": "float32",
61
+ "transformers_version": "4.38.2",
62
+ "use_cache": true,
63
+ "use_weighted_layer_sum": false,
64
+ "vocab_size": 51865
65
+ }
generation_config.json ADDED
@@ -0,0 +1,260 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alignment_heads": [
3
+ [
4
+ 3,
5
+ 1
6
+ ],
7
+ [
8
+ 4,
9
+ 2
10
+ ],
11
+ [
12
+ 4,
13
+ 3
14
+ ],
15
+ [
16
+ 4,
17
+ 7
18
+ ],
19
+ [
20
+ 5,
21
+ 1
22
+ ],
23
+ [
24
+ 5,
25
+ 2
26
+ ],
27
+ [
28
+ 5,
29
+ 4
30
+ ],
31
+ [
32
+ 5,
33
+ 6
34
+ ]
35
+ ],
36
+ "begin_suppress_tokens": [
37
+ 220,
38
+ 50257
39
+ ],
40
+ "bos_token_id": 50257,
41
+ "decoder_start_token_id": 50258,
42
+ "eos_token_id": 50257,
43
+ "forced_decoder_ids": [
44
+ [
45
+ 1,
46
+ 50260
47
+ ],
48
+ [
49
+ 2,
50
+ 50359
51
+ ],
52
+ [
53
+ 3,
54
+ 50363
55
+ ]
56
+ ],
57
+ "is_multilingual": true,
58
+ "lang_to_id": {
59
+ "<|af|>": 50327,
60
+ "<|am|>": 50334,
61
+ "<|ar|>": 50272,
62
+ "<|as|>": 50350,
63
+ "<|az|>": 50304,
64
+ "<|ba|>": 50355,
65
+ "<|be|>": 50330,
66
+ "<|bg|>": 50292,
67
+ "<|bn|>": 50302,
68
+ "<|bo|>": 50347,
69
+ "<|br|>": 50309,
70
+ "<|bs|>": 50315,
71
+ "<|ca|>": 50270,
72
+ "<|cs|>": 50283,
73
+ "<|cy|>": 50297,
74
+ "<|da|>": 50285,
75
+ "<|de|>": 50261,
76
+ "<|el|>": 50281,
77
+ "<|en|>": 50259,
78
+ "<|es|>": 50262,
79
+ "<|et|>": 50307,
80
+ "<|eu|>": 50310,
81
+ "<|fa|>": 50300,
82
+ "<|fi|>": 50277,
83
+ "<|fo|>": 50338,
84
+ "<|fr|>": 50265,
85
+ "<|gl|>": 50319,
86
+ "<|gu|>": 50333,
87
+ "<|haw|>": 50352,
88
+ "<|ha|>": 50354,
89
+ "<|he|>": 50279,
90
+ "<|hi|>": 50276,
91
+ "<|hr|>": 50291,
92
+ "<|ht|>": 50339,
93
+ "<|hu|>": 50286,
94
+ "<|hy|>": 50312,
95
+ "<|id|>": 50275,
96
+ "<|is|>": 50311,
97
+ "<|it|>": 50274,
98
+ "<|ja|>": 50266,
99
+ "<|jw|>": 50356,
100
+ "<|ka|>": 50329,
101
+ "<|kk|>": 50316,
102
+ "<|km|>": 50323,
103
+ "<|kn|>": 50306,
104
+ "<|ko|>": 50264,
105
+ "<|la|>": 50294,
106
+ "<|lb|>": 50345,
107
+ "<|ln|>": 50353,
108
+ "<|lo|>": 50336,
109
+ "<|lt|>": 50293,
110
+ "<|lv|>": 50301,
111
+ "<|mg|>": 50349,
112
+ "<|mi|>": 50295,
113
+ "<|mk|>": 50308,
114
+ "<|ml|>": 50296,
115
+ "<|mn|>": 50314,
116
+ "<|mr|>": 50320,
117
+ "<|ms|>": 50282,
118
+ "<|mt|>": 50343,
119
+ "<|my|>": 50346,
120
+ "<|ne|>": 50313,
121
+ "<|nl|>": 50271,
122
+ "<|nn|>": 50342,
123
+ "<|no|>": 50288,
124
+ "<|oc|>": 50328,
125
+ "<|pa|>": 50321,
126
+ "<|pl|>": 50269,
127
+ "<|ps|>": 50340,
128
+ "<|pt|>": 50267,
129
+ "<|ro|>": 50284,
130
+ "<|ru|>": 50263,
131
+ "<|sa|>": 50344,
132
+ "<|sd|>": 50332,
133
+ "<|si|>": 50322,
134
+ "<|sk|>": 50298,
135
+ "<|sl|>": 50305,
136
+ "<|sn|>": 50324,
137
+ "<|so|>": 50326,
138
+ "<|sq|>": 50317,
139
+ "<|sr|>": 50303,
140
+ "<|su|>": 50357,
141
+ "<|sv|>": 50273,
142
+ "<|sw|>": 50318,
143
+ "<|ta|>": 50287,
144
+ "<|te|>": 50299,
145
+ "<|tg|>": 50331,
146
+ "<|th|>": 50289,
147
+ "<|tk|>": 50341,
148
+ "<|tl|>": 50348,
149
+ "<|tr|>": 50268,
150
+ "<|tt|>": 50351,
151
+ "<|uk|>": 50280,
152
+ "<|ur|>": 50290,
153
+ "<|uz|>": 50337,
154
+ "<|vi|>": 50278,
155
+ "<|yi|>": 50335,
156
+ "<|yo|>": 50325,
157
+ "<|zh|>": 50260
158
+ },
159
+ "max_initial_timestamp_index": 50,
160
+ "max_length": 448,
161
+ "no_timestamps_token_id": 50363,
162
+ "pad_token_id": 50257,
163
+ "prev_sot_token_id": 50361,
164
+ "return_timestamps": false,
165
+ "suppress_tokens": [
166
+ 1,
167
+ 2,
168
+ 7,
169
+ 8,
170
+ 9,
171
+ 10,
172
+ 14,
173
+ 25,
174
+ 26,
175
+ 27,
176
+ 28,
177
+ 29,
178
+ 31,
179
+ 58,
180
+ 59,
181
+ 60,
182
+ 61,
183
+ 62,
184
+ 63,
185
+ 90,
186
+ 91,
187
+ 92,
188
+ 93,
189
+ 359,
190
+ 503,
191
+ 522,
192
+ 542,
193
+ 873,
194
+ 893,
195
+ 902,
196
+ 918,
197
+ 922,
198
+ 931,
199
+ 1350,
200
+ 1853,
201
+ 1982,
202
+ 2460,
203
+ 2627,
204
+ 3246,
205
+ 3253,
206
+ 3268,
207
+ 3536,
208
+ 3846,
209
+ 3961,
210
+ 4183,
211
+ 4667,
212
+ 6585,
213
+ 6647,
214
+ 7273,
215
+ 9061,
216
+ 9383,
217
+ 10428,
218
+ 10929,
219
+ 11938,
220
+ 12033,
221
+ 12331,
222
+ 12562,
223
+ 13793,
224
+ 14157,
225
+ 14635,
226
+ 15265,
227
+ 15618,
228
+ 16553,
229
+ 16604,
230
+ 18362,
231
+ 18956,
232
+ 20075,
233
+ 21675,
234
+ 22520,
235
+ 26130,
236
+ 26161,
237
+ 26435,
238
+ 28279,
239
+ 29464,
240
+ 31650,
241
+ 32302,
242
+ 32470,
243
+ 36865,
244
+ 42863,
245
+ 47425,
246
+ 49870,
247
+ 50254,
248
+ 50258,
249
+ 50358,
250
+ 50359,
251
+ 50360,
252
+ 50361,
253
+ 50362
254
+ ],
255
+ "task_to_id": {
256
+ "transcribe": 50359,
257
+ "translate": 50358
258
+ },
259
+ "transformers_version": "4.38.2"
260
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46a565022c1160a3742c322e206970cff34b92946418a05b4b4c6c1ff34d631e
3
+ size 290403936
normalizer.json ADDED
@@ -0,0 +1,1742 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "accessorise": "accessorize",
3
+ "accessorised": "accessorized",
4
+ "accessorises": "accessorizes",
5
+ "accessorising": "accessorizing",
6
+ "acclimatisation": "acclimatization",
7
+ "acclimatise": "acclimatize",
8
+ "acclimatised": "acclimatized",
9
+ "acclimatises": "acclimatizes",
10
+ "acclimatising": "acclimatizing",
11
+ "accoutrements": "accouterments",
12
+ "aeon": "eon",
13
+ "aeons": "eons",
14
+ "aerogramme": "aerogram",
15
+ "aerogrammes": "aerograms",
16
+ "aeroplane": "airplane",
17
+ "aeroplanes": "airplanes",
18
+ "aesthete": "esthete",
19
+ "aesthetes": "esthetes",
20
+ "aesthetic": "esthetic",
21
+ "aesthetically": "esthetically",
22
+ "aesthetics": "esthetics",
23
+ "aetiology": "etiology",
24
+ "ageing": "aging",
25
+ "aggrandisement": "aggrandizement",
26
+ "agonise": "agonize",
27
+ "agonised": "agonized",
28
+ "agonises": "agonizes",
29
+ "agonising": "agonizing",
30
+ "agonisingly": "agonizingly",
31
+ "almanack": "almanac",
32
+ "almanacks": "almanacs",
33
+ "aluminium": "aluminum",
34
+ "amortisable": "amortizable",
35
+ "amortisation": "amortization",
36
+ "amortisations": "amortizations",
37
+ "amortise": "amortize",
38
+ "amortised": "amortized",
39
+ "amortises": "amortizes",
40
+ "amortising": "amortizing",
41
+ "amphitheatre": "amphitheater",
42
+ "amphitheatres": "amphitheaters",
43
+ "anaemia": "anemia",
44
+ "anaemic": "anemic",
45
+ "anaesthesia": "anesthesia",
46
+ "anaesthetic": "anesthetic",
47
+ "anaesthetics": "anesthetics",
48
+ "anaesthetise": "anesthetize",
49
+ "anaesthetised": "anesthetized",
50
+ "anaesthetises": "anesthetizes",
51
+ "anaesthetising": "anesthetizing",
52
+ "anaesthetist": "anesthetist",
53
+ "anaesthetists": "anesthetists",
54
+ "anaesthetize": "anesthetize",
55
+ "anaesthetized": "anesthetized",
56
+ "anaesthetizes": "anesthetizes",
57
+ "anaesthetizing": "anesthetizing",
58
+ "analogue": "analog",
59
+ "analogues": "analogs",
60
+ "analyse": "analyze",
61
+ "analysed": "analyzed",
62
+ "analyses": "analyzes",
63
+ "analysing": "analyzing",
64
+ "anglicise": "anglicize",
65
+ "anglicised": "anglicized",
66
+ "anglicises": "anglicizes",
67
+ "anglicising": "anglicizing",
68
+ "annualised": "annualized",
69
+ "antagonise": "antagonize",
70
+ "antagonised": "antagonized",
71
+ "antagonises": "antagonizes",
72
+ "antagonising": "antagonizing",
73
+ "apologise": "apologize",
74
+ "apologised": "apologized",
75
+ "apologises": "apologizes",
76
+ "apologising": "apologizing",
77
+ "appal": "appall",
78
+ "appals": "appalls",
79
+ "appetiser": "appetizer",
80
+ "appetisers": "appetizers",
81
+ "appetising": "appetizing",
82
+ "appetisingly": "appetizingly",
83
+ "arbour": "arbor",
84
+ "arbours": "arbors",
85
+ "archaeologically": "archeologically",
86
+ "archaeologist": "archeologist",
87
+ "archaeologists": "archeologists",
88
+ "archaeology": "archeology",
89
+ "archeological": "archaeological",
90
+ "ardour": "ardor",
91
+ "armour": "armor",
92
+ "armoured": "armored",
93
+ "armourer": "armorer",
94
+ "armourers": "armorers",
95
+ "armouries": "armories",
96
+ "armoury": "armory",
97
+ "artefact": "artifact",
98
+ "artefacts": "artifacts",
99
+ "authorise": "authorize",
100
+ "authorised": "authorized",
101
+ "authorises": "authorizes",
102
+ "authorising": "authorizing",
103
+ "axe": "ax",
104
+ "backpedalled": "backpedaled",
105
+ "backpedalling": "backpedaling",
106
+ "bannister": "banister",
107
+ "bannisters": "banisters",
108
+ "baptise": "baptize",
109
+ "baptised": "baptized",
110
+ "baptises": "baptizes",
111
+ "baptising": "baptizing",
112
+ "bastardise": "bastardize",
113
+ "bastardised": "bastardized",
114
+ "bastardises": "bastardizes",
115
+ "bastardising": "bastardizing",
116
+ "battleax": "battleaxe",
117
+ "baulk": "balk",
118
+ "baulked": "balked",
119
+ "baulking": "balking",
120
+ "baulks": "balks",
121
+ "bedevilled": "bedeviled",
122
+ "bedevilling": "bedeviling",
123
+ "behaviour": "behavior",
124
+ "behavioural": "behavioral",
125
+ "behaviourism": "behaviorism",
126
+ "behaviourist": "behaviorist",
127
+ "behaviourists": "behaviorists",
128
+ "behaviours": "behaviors",
129
+ "behove": "behoove",
130
+ "behoved": "behooved",
131
+ "behoves": "behooves",
132
+ "bejewelled": "bejeweled",
133
+ "belabour": "belabor",
134
+ "belaboured": "belabored",
135
+ "belabouring": "belaboring",
136
+ "belabours": "belabors",
137
+ "bevelled": "beveled",
138
+ "bevvies": "bevies",
139
+ "bevvy": "bevy",
140
+ "biassed": "biased",
141
+ "biassing": "biasing",
142
+ "bingeing": "binging",
143
+ "bougainvillaea": "bougainvillea",
144
+ "bougainvillaeas": "bougainvilleas",
145
+ "bowdlerise": "bowdlerize",
146
+ "bowdlerised": "bowdlerized",
147
+ "bowdlerises": "bowdlerizes",
148
+ "bowdlerising": "bowdlerizing",
149
+ "breathalyse": "breathalyze",
150
+ "breathalysed": "breathalyzed",
151
+ "breathalyser": "breathalyzer",
152
+ "breathalysers": "breathalyzers",
153
+ "breathalyses": "breathalyzes",
154
+ "breathalysing": "breathalyzing",
155
+ "brutalise": "brutalize",
156
+ "brutalised": "brutalized",
157
+ "brutalises": "brutalizes",
158
+ "brutalising": "brutalizing",
159
+ "busses": "buses",
160
+ "bussing": "busing",
161
+ "caesarean": "cesarean",
162
+ "caesareans": "cesareans",
163
+ "calibre": "caliber",
164
+ "calibres": "calibers",
165
+ "calliper": "caliper",
166
+ "callipers": "calipers",
167
+ "callisthenics": "calisthenics",
168
+ "canalise": "canalize",
169
+ "canalised": "canalized",
170
+ "canalises": "canalizes",
171
+ "canalising": "canalizing",
172
+ "cancelation": "cancellation",
173
+ "cancelations": "cancellations",
174
+ "cancelled": "canceled",
175
+ "cancelling": "canceling",
176
+ "candour": "candor",
177
+ "cannibalise": "cannibalize",
178
+ "cannibalised": "cannibalized",
179
+ "cannibalises": "cannibalizes",
180
+ "cannibalising": "cannibalizing",
181
+ "canonise": "canonize",
182
+ "canonised": "canonized",
183
+ "canonises": "canonizes",
184
+ "canonising": "canonizing",
185
+ "capitalise": "capitalize",
186
+ "capitalised": "capitalized",
187
+ "capitalises": "capitalizes",
188
+ "capitalising": "capitalizing",
189
+ "caramelise": "caramelize",
190
+ "caramelised": "caramelized",
191
+ "caramelises": "caramelizes",
192
+ "caramelising": "caramelizing",
193
+ "carbonise": "carbonize",
194
+ "carbonised": "carbonized",
195
+ "carbonises": "carbonizes",
196
+ "carbonising": "carbonizing",
197
+ "carolled": "caroled",
198
+ "carolling": "caroling",
199
+ "catalogue": "catalog",
200
+ "catalogued": "cataloged",
201
+ "catalogues": "catalogs",
202
+ "cataloguing": "cataloging",
203
+ "catalyse": "catalyze",
204
+ "catalysed": "catalyzed",
205
+ "catalyses": "catalyzes",
206
+ "catalysing": "catalyzing",
207
+ "categorise": "categorize",
208
+ "categorised": "categorized",
209
+ "categorises": "categorizes",
210
+ "categorising": "categorizing",
211
+ "cauterise": "cauterize",
212
+ "cauterised": "cauterized",
213
+ "cauterises": "cauterizes",
214
+ "cauterising": "cauterizing",
215
+ "cavilled": "caviled",
216
+ "cavilling": "caviling",
217
+ "centigramme": "centigram",
218
+ "centigrammes": "centigrams",
219
+ "centilitre": "centiliter",
220
+ "centilitres": "centiliters",
221
+ "centimetre": "centimeter",
222
+ "centimetres": "centimeters",
223
+ "centralise": "centralize",
224
+ "centralised": "centralized",
225
+ "centralises": "centralizes",
226
+ "centralising": "centralizing",
227
+ "centre": "center",
228
+ "centred": "centered",
229
+ "centrefold": "centerfold",
230
+ "centrefolds": "centerfolds",
231
+ "centrepiece": "centerpiece",
232
+ "centrepieces": "centerpieces",
233
+ "centres": "centers",
234
+ "channelled": "channeled",
235
+ "channelling": "channeling",
236
+ "characterise": "characterize",
237
+ "characterised": "characterized",
238
+ "characterises": "characterizes",
239
+ "characterising": "characterizing",
240
+ "cheque": "check",
241
+ "chequebook": "checkbook",
242
+ "chequebooks": "checkbooks",
243
+ "chequered": "checkered",
244
+ "cheques": "checks",
245
+ "chilli": "chili",
246
+ "chimaera": "chimera",
247
+ "chimaeras": "chimeras",
248
+ "chiselled": "chiseled",
249
+ "chiselling": "chiseling",
250
+ "circularise": "circularize",
251
+ "circularised": "circularized",
252
+ "circularises": "circularizes",
253
+ "circularising": "circularizing",
254
+ "civilise": "civilize",
255
+ "civilised": "civilized",
256
+ "civilises": "civilizes",
257
+ "civilising": "civilizing",
258
+ "clamour": "clamor",
259
+ "clamoured": "clamored",
260
+ "clamouring": "clamoring",
261
+ "clamours": "clamors",
262
+ "clangour": "clangor",
263
+ "clarinettist": "clarinetist",
264
+ "clarinettists": "clarinetists",
265
+ "collectivise": "collectivize",
266
+ "collectivised": "collectivized",
267
+ "collectivises": "collectivizes",
268
+ "collectivising": "collectivizing",
269
+ "colonisation": "colonization",
270
+ "colonise": "colonize",
271
+ "colonised": "colonized",
272
+ "coloniser": "colonizer",
273
+ "colonisers": "colonizers",
274
+ "colonises": "colonizes",
275
+ "colonising": "colonizing",
276
+ "colour": "color",
277
+ "colourant": "colorant",
278
+ "colourants": "colorants",
279
+ "coloured": "colored",
280
+ "coloureds": "coloreds",
281
+ "colourful": "colorful",
282
+ "colourfully": "colorfully",
283
+ "colouring": "coloring",
284
+ "colourize": "colorize",
285
+ "colourized": "colorized",
286
+ "colourizes": "colorizes",
287
+ "colourizing": "colorizing",
288
+ "colourless": "colorless",
289
+ "colours": "colors",
290
+ "commercialise": "commercialize",
291
+ "commercialised": "commercialized",
292
+ "commercialises": "commercializes",
293
+ "commercialising": "commercializing",
294
+ "compartmentalise": "compartmentalize",
295
+ "compartmentalised": "compartmentalized",
296
+ "compartmentalises": "compartmentalizes",
297
+ "compartmentalising": "compartmentalizing",
298
+ "computerise": "computerize",
299
+ "computerised": "computerized",
300
+ "computerises": "computerizes",
301
+ "computerising": "computerizing",
302
+ "conceptualise": "conceptualize",
303
+ "conceptualised": "conceptualized",
304
+ "conceptualises": "conceptualizes",
305
+ "conceptualising": "conceptualizing",
306
+ "connexion": "connection",
307
+ "connexions": "connections",
308
+ "contextualise": "contextualize",
309
+ "contextualised": "contextualized",
310
+ "contextualises": "contextualizes",
311
+ "contextualising": "contextualizing",
312
+ "cosier": "cozier",
313
+ "cosies": "cozies",
314
+ "cosiest": "coziest",
315
+ "cosily": "cozily",
316
+ "cosiness": "coziness",
317
+ "cosy": "cozy",
318
+ "councillor": "councilor",
319
+ "councillors": "councilors",
320
+ "counselled": "counseled",
321
+ "counselling": "counseling",
322
+ "counsellor": "counselor",
323
+ "counsellors": "counselors",
324
+ "crenelated": "crenellated",
325
+ "criminalise": "criminalize",
326
+ "criminalised": "criminalized",
327
+ "criminalises": "criminalizes",
328
+ "criminalising": "criminalizing",
329
+ "criticise": "criticize",
330
+ "criticised": "criticized",
331
+ "criticises": "criticizes",
332
+ "criticising": "criticizing",
333
+ "crueller": "crueler",
334
+ "cruellest": "cruelest",
335
+ "crystallisation": "crystallization",
336
+ "crystallise": "crystallize",
337
+ "crystallised": "crystallized",
338
+ "crystallises": "crystallizes",
339
+ "crystallising": "crystallizing",
340
+ "cudgelled": "cudgeled",
341
+ "cudgelling": "cudgeling",
342
+ "customise": "customize",
343
+ "customised": "customized",
344
+ "customises": "customizes",
345
+ "customising": "customizing",
346
+ "cypher": "cipher",
347
+ "cyphers": "ciphers",
348
+ "decentralisation": "decentralization",
349
+ "decentralise": "decentralize",
350
+ "decentralised": "decentralized",
351
+ "decentralises": "decentralizes",
352
+ "decentralising": "decentralizing",
353
+ "decriminalisation": "decriminalization",
354
+ "decriminalise": "decriminalize",
355
+ "decriminalised": "decriminalized",
356
+ "decriminalises": "decriminalizes",
357
+ "decriminalising": "decriminalizing",
358
+ "defence": "defense",
359
+ "defenceless": "defenseless",
360
+ "defences": "defenses",
361
+ "dehumanisation": "dehumanization",
362
+ "dehumanise": "dehumanize",
363
+ "dehumanised": "dehumanized",
364
+ "dehumanises": "dehumanizes",
365
+ "dehumanising": "dehumanizing",
366
+ "demeanour": "demeanor",
367
+ "demilitarisation": "demilitarization",
368
+ "demilitarise": "demilitarize",
369
+ "demilitarised": "demilitarized",
370
+ "demilitarises": "demilitarizes",
371
+ "demilitarising": "demilitarizing",
372
+ "demobilisation": "demobilization",
373
+ "demobilise": "demobilize",
374
+ "demobilised": "demobilized",
375
+ "demobilises": "demobilizes",
376
+ "demobilising": "demobilizing",
377
+ "democratisation": "democratization",
378
+ "democratise": "democratize",
379
+ "democratised": "democratized",
380
+ "democratises": "democratizes",
381
+ "democratising": "democratizing",
382
+ "demonise": "demonize",
383
+ "demonised": "demonized",
384
+ "demonises": "demonizes",
385
+ "demonising": "demonizing",
386
+ "demoralisation": "demoralization",
387
+ "demoralise": "demoralize",
388
+ "demoralised": "demoralized",
389
+ "demoralises": "demoralizes",
390
+ "demoralising": "demoralizing",
391
+ "denationalisation": "denationalization",
392
+ "denationalise": "denationalize",
393
+ "denationalised": "denationalized",
394
+ "denationalises": "denationalizes",
395
+ "denationalising": "denationalizing",
396
+ "deodorise": "deodorize",
397
+ "deodorised": "deodorized",
398
+ "deodorises": "deodorizes",
399
+ "deodorising": "deodorizing",
400
+ "depersonalise": "depersonalize",
401
+ "depersonalised": "depersonalized",
402
+ "depersonalises": "depersonalizes",
403
+ "depersonalising": "depersonalizing",
404
+ "deputise": "deputize",
405
+ "deputised": "deputized",
406
+ "deputises": "deputizes",
407
+ "deputising": "deputizing",
408
+ "desensitisation": "desensitization",
409
+ "desensitise": "desensitize",
410
+ "desensitised": "desensitized",
411
+ "desensitises": "desensitizes",
412
+ "desensitising": "desensitizing",
413
+ "destabilisation": "destabilization",
414
+ "destabilise": "destabilize",
415
+ "destabilised": "destabilized",
416
+ "destabilises": "destabilizes",
417
+ "destabilising": "destabilizing",
418
+ "dialled": "dialed",
419
+ "dialling": "dialing",
420
+ "dialogue": "dialog",
421
+ "dialogues": "dialogs",
422
+ "diarrhoea": "diarrhea",
423
+ "digitise": "digitize",
424
+ "digitised": "digitized",
425
+ "digitises": "digitizes",
426
+ "digitising": "digitizing",
427
+ "disc": "disk",
428
+ "discolour": "discolor",
429
+ "discoloured": "discolored",
430
+ "discolouring": "discoloring",
431
+ "discolours": "discolors",
432
+ "discs": "disks",
433
+ "disembowelled": "disemboweled",
434
+ "disembowelling": "disemboweling",
435
+ "disfavour": "disfavor",
436
+ "dishevelled": "disheveled",
437
+ "dishonour": "dishonor",
438
+ "dishonourable": "dishonorable",
439
+ "dishonourably": "dishonorably",
440
+ "dishonoured": "dishonored",
441
+ "dishonouring": "dishonoring",
442
+ "dishonours": "dishonors",
443
+ "disorganisation": "disorganization",
444
+ "disorganised": "disorganized",
445
+ "distil": "distill",
446
+ "distils": "distills",
447
+ "dramatisation": "dramatization",
448
+ "dramatisations": "dramatizations",
449
+ "dramatise": "dramatize",
450
+ "dramatised": "dramatized",
451
+ "dramatises": "dramatizes",
452
+ "dramatising": "dramatizing",
453
+ "draught": "draft",
454
+ "draughtboard": "draftboard",
455
+ "draughtboards": "draftboards",
456
+ "draughtier": "draftier",
457
+ "draughtiest": "draftiest",
458
+ "draughts": "drafts",
459
+ "draughtsman": "draftsman",
460
+ "draughtsmanship": "draftsmanship",
461
+ "draughtsmen": "draftsmen",
462
+ "draughtswoman": "draftswoman",
463
+ "draughtswomen": "draftswomen",
464
+ "draughty": "drafty",
465
+ "drivelled": "driveled",
466
+ "drivelling": "driveling",
467
+ "duelled": "dueled",
468
+ "duelling": "dueling",
469
+ "economise": "economize",
470
+ "economised": "economized",
471
+ "economises": "economizes",
472
+ "economising": "economizing",
473
+ "editorialise": "editorialize",
474
+ "editorialised": "editorialized",
475
+ "editorialises": "editorializes",
476
+ "editorialising": "editorializing",
477
+ "edoema": "edema",
478
+ "empathise": "empathize",
479
+ "empathised": "empathized",
480
+ "empathises": "empathizes",
481
+ "empathising": "empathizing",
482
+ "emphasise": "emphasize",
483
+ "emphasised": "emphasized",
484
+ "emphasises": "emphasizes",
485
+ "emphasising": "emphasizing",
486
+ "enamelled": "enameled",
487
+ "enamelling": "enameling",
488
+ "enamoured": "enamored",
489
+ "encyclopaedia": "encyclopedia",
490
+ "encyclopaedias": "encyclopedias",
491
+ "encyclopaedic": "encyclopedic",
492
+ "endeavour": "endeavor",
493
+ "endeavoured": "endeavored",
494
+ "endeavouring": "endeavoring",
495
+ "endeavours": "endeavors",
496
+ "energise": "energize",
497
+ "energised": "energized",
498
+ "energises": "energizes",
499
+ "energising": "energizing",
500
+ "enrol": "enroll",
501
+ "enrols": "enrolls",
502
+ "enthral": "enthrall",
503
+ "enthrals": "enthralls",
504
+ "epaulette": "epaulet",
505
+ "epaulettes": "epaulets",
506
+ "epicentre": "epicenter",
507
+ "epicentres": "epicenters",
508
+ "epilogue": "epilog",
509
+ "epilogues": "epilogs",
510
+ "epitomise": "epitomize",
511
+ "epitomised": "epitomized",
512
+ "epitomises": "epitomizes",
513
+ "epitomising": "epitomizing",
514
+ "equalisation": "equalization",
515
+ "equalise": "equalize",
516
+ "equalised": "equalized",
517
+ "equaliser": "equalizer",
518
+ "equalisers": "equalizers",
519
+ "equalises": "equalizes",
520
+ "equalising": "equalizing",
521
+ "eulogise": "eulogize",
522
+ "eulogised": "eulogized",
523
+ "eulogises": "eulogizes",
524
+ "eulogising": "eulogizing",
525
+ "evangelise": "evangelize",
526
+ "evangelised": "evangelized",
527
+ "evangelises": "evangelizes",
528
+ "evangelising": "evangelizing",
529
+ "exorcise": "exorcize",
530
+ "exorcised": "exorcized",
531
+ "exorcises": "exorcizes",
532
+ "exorcising": "exorcizing",
533
+ "extemporisation": "extemporization",
534
+ "extemporise": "extemporize",
535
+ "extemporised": "extemporized",
536
+ "extemporises": "extemporizes",
537
+ "extemporising": "extemporizing",
538
+ "externalisation": "externalization",
539
+ "externalisations": "externalizations",
540
+ "externalise": "externalize",
541
+ "externalised": "externalized",
542
+ "externalises": "externalizes",
543
+ "externalising": "externalizing",
544
+ "factorise": "factorize",
545
+ "factorised": "factorized",
546
+ "factorises": "factorizes",
547
+ "factorising": "factorizing",
548
+ "faecal": "fecal",
549
+ "faeces": "feces",
550
+ "familiarisation": "familiarization",
551
+ "familiarise": "familiarize",
552
+ "familiarised": "familiarized",
553
+ "familiarises": "familiarizes",
554
+ "familiarising": "familiarizing",
555
+ "fantasise": "fantasize",
556
+ "fantasised": "fantasized",
557
+ "fantasises": "fantasizes",
558
+ "fantasising": "fantasizing",
559
+ "favour": "favor",
560
+ "favourable": "favorable",
561
+ "favourably": "favorably",
562
+ "favoured": "favored",
563
+ "favouring": "favoring",
564
+ "favourite": "favorite",
565
+ "favourites": "favorites",
566
+ "favouritism": "favoritism",
567
+ "favours": "favors",
568
+ "feminise": "feminize",
569
+ "feminised": "feminized",
570
+ "feminises": "feminizes",
571
+ "feminising": "feminizing",
572
+ "fertilisation": "fertilization",
573
+ "fertilise": "fertilize",
574
+ "fertilised": "fertilized",
575
+ "fertiliser": "fertilizer",
576
+ "fertilisers": "fertilizers",
577
+ "fertilises": "fertilizes",
578
+ "fertilising": "fertilizing",
579
+ "fervour": "fervor",
580
+ "fibre": "fiber",
581
+ "fibreglass": "fiberglass",
582
+ "fibres": "fibers",
583
+ "fictionalisation": "fictionalization",
584
+ "fictionalisations": "fictionalizations",
585
+ "fictionalise": "fictionalize",
586
+ "fictionalised": "fictionalized",
587
+ "fictionalises": "fictionalizes",
588
+ "fictionalising": "fictionalizing",
589
+ "fillet": "filet",
590
+ "filleted": "fileted",
591
+ "filleting": "fileting",
592
+ "fillets": "filets",
593
+ "finalisation": "finalization",
594
+ "finalise": "finalize",
595
+ "finalised": "finalized",
596
+ "finalises": "finalizes",
597
+ "finalising": "finalizing",
598
+ "flautist": "flutist",
599
+ "flautists": "flutists",
600
+ "flavour": "flavor",
601
+ "flavoured": "flavored",
602
+ "flavouring": "flavoring",
603
+ "flavourings": "flavorings",
604
+ "flavourless": "flavorless",
605
+ "flavours": "flavors",
606
+ "flavoursome": "flavorsome",
607
+ "flyer / flier": "flier / flyer",
608
+ "foetal": "fetal",
609
+ "foetid": "fetid",
610
+ "foetus": "fetus",
611
+ "foetuses": "fetuses",
612
+ "formalisation": "formalization",
613
+ "formalise": "formalize",
614
+ "formalised": "formalized",
615
+ "formalises": "formalizes",
616
+ "formalising": "formalizing",
617
+ "fossilisation": "fossilization",
618
+ "fossilise": "fossilize",
619
+ "fossilised": "fossilized",
620
+ "fossilises": "fossilizes",
621
+ "fossilising": "fossilizing",
622
+ "fraternisation": "fraternization",
623
+ "fraternise": "fraternize",
624
+ "fraternised": "fraternized",
625
+ "fraternises": "fraternizes",
626
+ "fraternising": "fraternizing",
627
+ "fulfil": "fulfill",
628
+ "fulfilment": "fulfillment",
629
+ "fulfils": "fulfills",
630
+ "funnelled": "funneled",
631
+ "funnelling": "funneling",
632
+ "gage": "gauge",
633
+ "gaged": "gauged",
634
+ "gages": "gauges",
635
+ "gaging": "gauging",
636
+ "galvanise": "galvanize",
637
+ "galvanised": "galvanized",
638
+ "galvanises": "galvanizes",
639
+ "galvanising": "galvanizing",
640
+ "gambolled": "gamboled",
641
+ "gambolling": "gamboling",
642
+ "gaol": "jail",
643
+ "gaolbird": "jailbird",
644
+ "gaolbirds": "jailbirds",
645
+ "gaolbreak": "jailbreak",
646
+ "gaolbreaks": "jailbreaks",
647
+ "gaoled": "jailed",
648
+ "gaoler": "jailer",
649
+ "gaolers": "jailers",
650
+ "gaoling": "jailing",
651
+ "gaols": "jails",
652
+ "gasses": "gases",
653
+ "generalisation": "generalization",
654
+ "generalisations": "generalizations",
655
+ "generalise": "generalize",
656
+ "generalised": "generalized",
657
+ "generalises": "generalizes",
658
+ "generalising": "generalizing",
659
+ "ghettoise": "ghettoize",
660
+ "ghettoised": "ghettoized",
661
+ "ghettoises": "ghettoizes",
662
+ "ghettoising": "ghettoizing",
663
+ "gipsies": "gypsies",
664
+ "glamor": "glamour",
665
+ "glamorise": "glamorize",
666
+ "glamorised": "glamorized",
667
+ "glamorises": "glamorizes",
668
+ "glamorising": "glamorizing",
669
+ "globalisation": "globalization",
670
+ "globalise": "globalize",
671
+ "globalised": "globalized",
672
+ "globalises": "globalizes",
673
+ "globalising": "globalizing",
674
+ "glueing": "gluing",
675
+ "goitre": "goiter",
676
+ "goitres": "goiters",
677
+ "gonorrhoea": "gonorrhea",
678
+ "gramme": "gram",
679
+ "grammes": "grams",
680
+ "gravelled": "graveled",
681
+ "grey": "gray",
682
+ "greyed": "grayed",
683
+ "greying": "graying",
684
+ "greyish": "grayish",
685
+ "greyness": "grayness",
686
+ "greys": "grays",
687
+ "grovelled": "groveled",
688
+ "grovelling": "groveling",
689
+ "groyne": "groin",
690
+ "groynes": "groins",
691
+ "gruelling": "grueling",
692
+ "gruellingly": "gruelingly",
693
+ "gryphon": "griffin",
694
+ "gryphons": "griffins",
695
+ "gynaecological": "gynecological",
696
+ "gynaecologist": "gynecologist",
697
+ "gynaecologists": "gynecologists",
698
+ "gynaecology": "gynecology",
699
+ "haematological": "hematological",
700
+ "haematologist": "hematologist",
701
+ "haematologists": "hematologists",
702
+ "haematology": "hematology",
703
+ "haemoglobin": "hemoglobin",
704
+ "haemophilia": "hemophilia",
705
+ "haemophiliac": "hemophiliac",
706
+ "haemophiliacs": "hemophiliacs",
707
+ "haemorrhage": "hemorrhage",
708
+ "haemorrhaged": "hemorrhaged",
709
+ "haemorrhages": "hemorrhages",
710
+ "haemorrhaging": "hemorrhaging",
711
+ "haemorrhoids": "hemorrhoids",
712
+ "harbour": "harbor",
713
+ "harboured": "harbored",
714
+ "harbouring": "harboring",
715
+ "harbours": "harbors",
716
+ "harmonisation": "harmonization",
717
+ "harmonise": "harmonize",
718
+ "harmonised": "harmonized",
719
+ "harmonises": "harmonizes",
720
+ "harmonising": "harmonizing",
721
+ "homoeopath": "homeopath",
722
+ "homoeopathic": "homeopathic",
723
+ "homoeopaths": "homeopaths",
724
+ "homoeopathy": "homeopathy",
725
+ "homogenise": "homogenize",
726
+ "homogenised": "homogenized",
727
+ "homogenises": "homogenizes",
728
+ "homogenising": "homogenizing",
729
+ "honour": "honor",
730
+ "honourable": "honorable",
731
+ "honourably": "honorably",
732
+ "honoured": "honored",
733
+ "honouring": "honoring",
734
+ "honours": "honors",
735
+ "hospitalisation": "hospitalization",
736
+ "hospitalise": "hospitalize",
737
+ "hospitalised": "hospitalized",
738
+ "hospitalises": "hospitalizes",
739
+ "hospitalising": "hospitalizing",
740
+ "humanise": "humanize",
741
+ "humanised": "humanized",
742
+ "humanises": "humanizes",
743
+ "humanising": "humanizing",
744
+ "humour": "humor",
745
+ "humoured": "humored",
746
+ "humouring": "humoring",
747
+ "humourless": "humorless",
748
+ "humours": "humors",
749
+ "hybridise": "hybridize",
750
+ "hybridised": "hybridized",
751
+ "hybridises": "hybridizes",
752
+ "hybridising": "hybridizing",
753
+ "hypnotise": "hypnotize",
754
+ "hypnotised": "hypnotized",
755
+ "hypnotises": "hypnotizes",
756
+ "hypnotising": "hypnotizing",
757
+ "hypothesise": "hypothesize",
758
+ "hypothesised": "hypothesized",
759
+ "hypothesises": "hypothesizes",
760
+ "hypothesising": "hypothesizing",
761
+ "idealisation": "idealization",
762
+ "idealise": "idealize",
763
+ "idealised": "idealized",
764
+ "idealises": "idealizes",
765
+ "idealising": "idealizing",
766
+ "idolise": "idolize",
767
+ "idolised": "idolized",
768
+ "idolises": "idolizes",
769
+ "idolising": "idolizing",
770
+ "immobilisation": "immobilization",
771
+ "immobilise": "immobilize",
772
+ "immobilised": "immobilized",
773
+ "immobiliser": "immobilizer",
774
+ "immobilisers": "immobilizers",
775
+ "immobilises": "immobilizes",
776
+ "immobilising": "immobilizing",
777
+ "immortalise": "immortalize",
778
+ "immortalised": "immortalized",
779
+ "immortalises": "immortalizes",
780
+ "immortalising": "immortalizing",
781
+ "immunisation": "immunization",
782
+ "immunise": "immunize",
783
+ "immunised": "immunized",
784
+ "immunises": "immunizes",
785
+ "immunising": "immunizing",
786
+ "impanelled": "impaneled",
787
+ "impanelling": "impaneling",
788
+ "imperilled": "imperiled",
789
+ "imperilling": "imperiling",
790
+ "individualise": "individualize",
791
+ "individualised": "individualized",
792
+ "individualises": "individualizes",
793
+ "individualising": "individualizing",
794
+ "industrialise": "industrialize",
795
+ "industrialised": "industrialized",
796
+ "industrialises": "industrializes",
797
+ "industrialising": "industrializing",
798
+ "inflexion": "inflection",
799
+ "inflexions": "inflections",
800
+ "initialise": "initialize",
801
+ "initialised": "initialized",
802
+ "initialises": "initializes",
803
+ "initialising": "initializing",
804
+ "initialled": "initialed",
805
+ "initialling": "initialing",
806
+ "instal": "install",
807
+ "instalment": "installment",
808
+ "instalments": "installments",
809
+ "instals": "installs",
810
+ "instil": "instill",
811
+ "instils": "instills",
812
+ "institutionalisation": "institutionalization",
813
+ "institutionalise": "institutionalize",
814
+ "institutionalised": "institutionalized",
815
+ "institutionalises": "institutionalizes",
816
+ "institutionalising": "institutionalizing",
817
+ "intellectualise": "intellectualize",
818
+ "intellectualised": "intellectualized",
819
+ "intellectualises": "intellectualizes",
820
+ "intellectualising": "intellectualizing",
821
+ "internalisation": "internalization",
822
+ "internalise": "internalize",
823
+ "internalised": "internalized",
824
+ "internalises": "internalizes",
825
+ "internalising": "internalizing",
826
+ "internationalisation": "internationalization",
827
+ "internationalise": "internationalize",
828
+ "internationalised": "internationalized",
829
+ "internationalises": "internationalizes",
830
+ "internationalising": "internationalizing",
831
+ "ionisation": "ionization",
832
+ "ionise": "ionize",
833
+ "ionised": "ionized",
834
+ "ioniser": "ionizer",
835
+ "ionisers": "ionizers",
836
+ "ionises": "ionizes",
837
+ "ionising": "ionizing",
838
+ "italicise": "italicize",
839
+ "italicised": "italicized",
840
+ "italicises": "italicizes",
841
+ "italicising": "italicizing",
842
+ "itemise": "itemize",
843
+ "itemised": "itemized",
844
+ "itemises": "itemizes",
845
+ "itemising": "itemizing",
846
+ "jeopardise": "jeopardize",
847
+ "jeopardised": "jeopardized",
848
+ "jeopardises": "jeopardizes",
849
+ "jeopardising": "jeopardizing",
850
+ "jewelled": "jeweled",
851
+ "jeweller": "jeweler",
852
+ "jewellers": "jewelers",
853
+ "jewellery": "jewelry",
854
+ "judgement": "judgment",
855
+ "kilogramme": "kilogram",
856
+ "kilogrammes": "kilograms",
857
+ "kilometre": "kilometer",
858
+ "kilometres": "kilometers",
859
+ "labelled": "labeled",
860
+ "labelling": "labeling",
861
+ "labour": "labor",
862
+ "laboured": "labored",
863
+ "labourer": "laborer",
864
+ "labourers": "laborers",
865
+ "labouring": "laboring",
866
+ "labours": "labors",
867
+ "lacklustre": "lackluster",
868
+ "legalisation": "legalization",
869
+ "legalise": "legalize",
870
+ "legalised": "legalized",
871
+ "legalises": "legalizes",
872
+ "legalising": "legalizing",
873
+ "legitimise": "legitimize",
874
+ "legitimised": "legitimized",
875
+ "legitimises": "legitimizes",
876
+ "legitimising": "legitimizing",
877
+ "leukaemia": "leukemia",
878
+ "levelled": "leveled",
879
+ "leveller": "leveler",
880
+ "levellers": "levelers",
881
+ "levelling": "leveling",
882
+ "libelled": "libeled",
883
+ "libelling": "libeling",
884
+ "libellous": "libelous",
885
+ "liberalisation": "liberalization",
886
+ "liberalise": "liberalize",
887
+ "liberalised": "liberalized",
888
+ "liberalises": "liberalizes",
889
+ "liberalising": "liberalizing",
890
+ "licence": "license",
891
+ "licenced": "licensed",
892
+ "licences": "licenses",
893
+ "licencing": "licensing",
894
+ "likeable": "likable",
895
+ "lionisation": "lionization",
896
+ "lionise": "lionize",
897
+ "lionised": "lionized",
898
+ "lionises": "lionizes",
899
+ "lionising": "lionizing",
900
+ "liquidise": "liquidize",
901
+ "liquidised": "liquidized",
902
+ "liquidiser": "liquidizer",
903
+ "liquidisers": "liquidizers",
904
+ "liquidises": "liquidizes",
905
+ "liquidising": "liquidizing",
906
+ "litre": "liter",
907
+ "litres": "liters",
908
+ "localise": "localize",
909
+ "localised": "localized",
910
+ "localises": "localizes",
911
+ "localising": "localizing",
912
+ "louvre": "louver",
913
+ "louvred": "louvered",
914
+ "louvres": "louvers",
915
+ "lustre": "luster",
916
+ "magnetise": "magnetize",
917
+ "magnetised": "magnetized",
918
+ "magnetises": "magnetizes",
919
+ "magnetising": "magnetizing",
920
+ "manoeuvrability": "maneuverability",
921
+ "manoeuvrable": "maneuverable",
922
+ "manoeuvre": "maneuver",
923
+ "manoeuvred": "maneuvered",
924
+ "manoeuvres": "maneuvers",
925
+ "manoeuvring": "maneuvering",
926
+ "manoeuvrings": "maneuverings",
927
+ "marginalisation": "marginalization",
928
+ "marginalise": "marginalize",
929
+ "marginalised": "marginalized",
930
+ "marginalises": "marginalizes",
931
+ "marginalising": "marginalizing",
932
+ "marshalled": "marshaled",
933
+ "marshalling": "marshaling",
934
+ "marvelled": "marveled",
935
+ "marvelling": "marveling",
936
+ "marvellous": "marvelous",
937
+ "marvellously": "marvelously",
938
+ "materialisation": "materialization",
939
+ "materialise": "materialize",
940
+ "materialised": "materialized",
941
+ "materialises": "materializes",
942
+ "materialising": "materializing",
943
+ "maximisation": "maximization",
944
+ "maximise": "maximize",
945
+ "maximised": "maximized",
946
+ "maximises": "maximizes",
947
+ "maximising": "maximizing",
948
+ "meagre": "meager",
949
+ "mechanisation": "mechanization",
950
+ "mechanise": "mechanize",
951
+ "mechanised": "mechanized",
952
+ "mechanises": "mechanizes",
953
+ "mechanising": "mechanizing",
954
+ "mediaeval": "medieval",
955
+ "memorialise": "memorialize",
956
+ "memorialised": "memorialized",
957
+ "memorialises": "memorializes",
958
+ "memorialising": "memorializing",
959
+ "memorise": "memorize",
960
+ "memorised": "memorized",
961
+ "memorises": "memorizes",
962
+ "memorising": "memorizing",
963
+ "mesmerise": "mesmerize",
964
+ "mesmerised": "mesmerized",
965
+ "mesmerises": "mesmerizes",
966
+ "mesmerising": "mesmerizing",
967
+ "metabolise": "metabolize",
968
+ "metabolised": "metabolized",
969
+ "metabolises": "metabolizes",
970
+ "metabolising": "metabolizing",
971
+ "metre": "meter",
972
+ "metres": "meters",
973
+ "mhm": "hmm",
974
+ "micrometre": "micrometer",
975
+ "micrometres": "micrometers",
976
+ "militarise": "militarize",
977
+ "militarised": "militarized",
978
+ "militarises": "militarizes",
979
+ "militarising": "militarizing",
980
+ "milligramme": "milligram",
981
+ "milligrammes": "milligrams",
982
+ "millilitre": "milliliter",
983
+ "millilitres": "milliliters",
984
+ "millimetre": "millimeter",
985
+ "millimetres": "millimeters",
986
+ "miniaturisation": "miniaturization",
987
+ "miniaturise": "miniaturize",
988
+ "miniaturised": "miniaturized",
989
+ "miniaturises": "miniaturizes",
990
+ "miniaturising": "miniaturizing",
991
+ "minibusses": "minibuses",
992
+ "minimise": "minimize",
993
+ "minimised": "minimized",
994
+ "minimises": "minimizes",
995
+ "minimising": "minimizing",
996
+ "misbehaviour": "misbehavior",
997
+ "misdemeanour": "misdemeanor",
998
+ "misdemeanours": "misdemeanors",
999
+ "misspelt": "misspelled",
1000
+ "mitre": "miter",
1001
+ "mitres": "miters",
1002
+ "mm": "hmm",
1003
+ "mmm": "hmm",
1004
+ "mobilisation": "mobilization",
1005
+ "mobilise": "mobilize",
1006
+ "mobilised": "mobilized",
1007
+ "mobilises": "mobilizes",
1008
+ "mobilising": "mobilizing",
1009
+ "modelled": "modeled",
1010
+ "modeller": "modeler",
1011
+ "modellers": "modelers",
1012
+ "modelling": "modeling",
1013
+ "modernise": "modernize",
1014
+ "modernised": "modernized",
1015
+ "modernises": "modernizes",
1016
+ "modernising": "modernizing",
1017
+ "moisturise": "moisturize",
1018
+ "moisturised": "moisturized",
1019
+ "moisturiser": "moisturizer",
1020
+ "moisturisers": "moisturizers",
1021
+ "moisturises": "moisturizes",
1022
+ "moisturising": "moisturizing",
1023
+ "monologue": "monolog",
1024
+ "monologues": "monologs",
1025
+ "monopolisation": "monopolization",
1026
+ "monopolise": "monopolize",
1027
+ "monopolised": "monopolized",
1028
+ "monopolises": "monopolizes",
1029
+ "monopolising": "monopolizing",
1030
+ "moralise": "moralize",
1031
+ "moralised": "moralized",
1032
+ "moralises": "moralizes",
1033
+ "moralising": "moralizing",
1034
+ "motorised": "motorized",
1035
+ "mould": "mold",
1036
+ "moulded": "molded",
1037
+ "moulder": "molder",
1038
+ "mouldered": "moldered",
1039
+ "mouldering": "moldering",
1040
+ "moulders": "molders",
1041
+ "mouldier": "moldier",
1042
+ "mouldiest": "moldiest",
1043
+ "moulding": "molding",
1044
+ "mouldings": "moldings",
1045
+ "moulds": "molds",
1046
+ "mouldy": "moldy",
1047
+ "moult": "molt",
1048
+ "moulted": "molted",
1049
+ "moulting": "molting",
1050
+ "moults": "molts",
1051
+ "moustache": "mustache",
1052
+ "moustached": "mustached",
1053
+ "moustaches": "mustaches",
1054
+ "moustachioed": "mustachioed",
1055
+ "multicoloured": "multicolored",
1056
+ "nationalisation": "nationalization",
1057
+ "nationalisations": "nationalizations",
1058
+ "nationalise": "nationalize",
1059
+ "nationalised": "nationalized",
1060
+ "nationalises": "nationalizes",
1061
+ "nationalising": "nationalizing",
1062
+ "naturalisation": "naturalization",
1063
+ "naturalise": "naturalize",
1064
+ "naturalised": "naturalized",
1065
+ "naturalises": "naturalizes",
1066
+ "naturalising": "naturalizing",
1067
+ "neighbour": "neighbor",
1068
+ "neighbourhood": "neighborhood",
1069
+ "neighbourhoods": "neighborhoods",
1070
+ "neighbouring": "neighboring",
1071
+ "neighbourliness": "neighborliness",
1072
+ "neighbourly": "neighborly",
1073
+ "neighbours": "neighbors",
1074
+ "neutralisation": "neutralization",
1075
+ "neutralise": "neutralize",
1076
+ "neutralised": "neutralized",
1077
+ "neutralises": "neutralizes",
1078
+ "neutralising": "neutralizing",
1079
+ "normalisation": "normalization",
1080
+ "normalise": "normalize",
1081
+ "normalised": "normalized",
1082
+ "normalises": "normalizes",
1083
+ "normalising": "normalizing",
1084
+ "odour": "odor",
1085
+ "odourless": "odorless",
1086
+ "odours": "odors",
1087
+ "oesophagus": "esophagus",
1088
+ "oesophaguses": "esophaguses",
1089
+ "oestrogen": "estrogen",
1090
+ "offence": "offense",
1091
+ "offences": "offenses",
1092
+ "omelette": "omelet",
1093
+ "omelettes": "omelets",
1094
+ "optimise": "optimize",
1095
+ "optimised": "optimized",
1096
+ "optimises": "optimizes",
1097
+ "optimising": "optimizing",
1098
+ "organisation": "organization",
1099
+ "organisational": "organizational",
1100
+ "organisations": "organizations",
1101
+ "organise": "organize",
1102
+ "organised": "organized",
1103
+ "organiser": "organizer",
1104
+ "organisers": "organizers",
1105
+ "organises": "organizes",
1106
+ "organising": "organizing",
1107
+ "orthopaedic": "orthopedic",
1108
+ "orthopaedics": "orthopedics",
1109
+ "ostracise": "ostracize",
1110
+ "ostracised": "ostracized",
1111
+ "ostracises": "ostracizes",
1112
+ "ostracising": "ostracizing",
1113
+ "outmanoeuvre": "outmaneuver",
1114
+ "outmanoeuvred": "outmaneuvered",
1115
+ "outmanoeuvres": "outmaneuvers",
1116
+ "outmanoeuvring": "outmaneuvering",
1117
+ "overemphasise": "overemphasize",
1118
+ "overemphasised": "overemphasized",
1119
+ "overemphasises": "overemphasizes",
1120
+ "overemphasising": "overemphasizing",
1121
+ "oxidisation": "oxidization",
1122
+ "oxidise": "oxidize",
1123
+ "oxidised": "oxidized",
1124
+ "oxidises": "oxidizes",
1125
+ "oxidising": "oxidizing",
1126
+ "paederast": "pederast",
1127
+ "paederasts": "pederasts",
1128
+ "paediatric": "pediatric",
1129
+ "paediatrician": "pediatrician",
1130
+ "paediatricians": "pediatricians",
1131
+ "paediatrics": "pediatrics",
1132
+ "paedophile": "pedophile",
1133
+ "paedophiles": "pedophiles",
1134
+ "paedophilia": "pedophilia",
1135
+ "palaeolithic": "paleolithic",
1136
+ "palaeontologist": "paleontologist",
1137
+ "palaeontologists": "paleontologists",
1138
+ "palaeontology": "paleontology",
1139
+ "panelled": "paneled",
1140
+ "panelling": "paneling",
1141
+ "panellist": "panelist",
1142
+ "panellists": "panelists",
1143
+ "paralyse": "paralyze",
1144
+ "paralysed": "paralyzed",
1145
+ "paralyses": "paralyzes",
1146
+ "paralysing": "paralyzing",
1147
+ "parcelled": "parceled",
1148
+ "parcelling": "parceling",
1149
+ "parlour": "parlor",
1150
+ "parlours": "parlors",
1151
+ "particularise": "particularize",
1152
+ "particularised": "particularized",
1153
+ "particularises": "particularizes",
1154
+ "particularising": "particularizing",
1155
+ "passivisation": "passivization",
1156
+ "passivise": "passivize",
1157
+ "passivised": "passivized",
1158
+ "passivises": "passivizes",
1159
+ "passivising": "passivizing",
1160
+ "pasteurisation": "pasteurization",
1161
+ "pasteurise": "pasteurize",
1162
+ "pasteurised": "pasteurized",
1163
+ "pasteurises": "pasteurizes",
1164
+ "pasteurising": "pasteurizing",
1165
+ "patronise": "patronize",
1166
+ "patronised": "patronized",
1167
+ "patronises": "patronizes",
1168
+ "patronising": "patronizing",
1169
+ "patronisingly": "patronizingly",
1170
+ "pedalled": "pedaled",
1171
+ "pedalling": "pedaling",
1172
+ "pedestrianisation": "pedestrianization",
1173
+ "pedestrianise": "pedestrianize",
1174
+ "pedestrianised": "pedestrianized",
1175
+ "pedestrianises": "pedestrianizes",
1176
+ "pedestrianising": "pedestrianizing",
1177
+ "penalise": "penalize",
1178
+ "penalised": "penalized",
1179
+ "penalises": "penalizes",
1180
+ "penalising": "penalizing",
1181
+ "pencilled": "penciled",
1182
+ "pencilling": "penciling",
1183
+ "personalise": "personalize",
1184
+ "personalised": "personalized",
1185
+ "personalises": "personalizes",
1186
+ "personalising": "personalizing",
1187
+ "pharmacopoeia": "pharmacopeia",
1188
+ "pharmacopoeias": "pharmacopeias",
1189
+ "philosophise": "philosophize",
1190
+ "philosophised": "philosophized",
1191
+ "philosophises": "philosophizes",
1192
+ "philosophising": "philosophizing",
1193
+ "philtre": "filter",
1194
+ "philtres": "filters",
1195
+ "phoney": "phony",
1196
+ "plagiarise": "plagiarize",
1197
+ "plagiarised": "plagiarized",
1198
+ "plagiarises": "plagiarizes",
1199
+ "plagiarising": "plagiarizing",
1200
+ "plough": "plow",
1201
+ "ploughed": "plowed",
1202
+ "ploughing": "plowing",
1203
+ "ploughman": "plowman",
1204
+ "ploughmen": "plowmen",
1205
+ "ploughs": "plows",
1206
+ "ploughshare": "plowshare",
1207
+ "ploughshares": "plowshares",
1208
+ "polarisation": "polarization",
1209
+ "polarise": "polarize",
1210
+ "polarised": "polarized",
1211
+ "polarises": "polarizes",
1212
+ "polarising": "polarizing",
1213
+ "politicisation": "politicization",
1214
+ "politicise": "politicize",
1215
+ "politicised": "politicized",
1216
+ "politicises": "politicizes",
1217
+ "politicising": "politicizing",
1218
+ "popularisation": "popularization",
1219
+ "popularise": "popularize",
1220
+ "popularised": "popularized",
1221
+ "popularises": "popularizes",
1222
+ "popularising": "popularizing",
1223
+ "pouffe": "pouf",
1224
+ "pouffes": "poufs",
1225
+ "practise": "practice",
1226
+ "practised": "practiced",
1227
+ "practises": "practices",
1228
+ "practising": "practicing",
1229
+ "praesidium": "presidium",
1230
+ "praesidiums": "presidiums",
1231
+ "pressurisation": "pressurization",
1232
+ "pressurise": "pressurize",
1233
+ "pressurised": "pressurized",
1234
+ "pressurises": "pressurizes",
1235
+ "pressurising": "pressurizing",
1236
+ "pretence": "pretense",
1237
+ "pretences": "pretenses",
1238
+ "primaeval": "primeval",
1239
+ "prioritisation": "prioritization",
1240
+ "prioritise": "prioritize",
1241
+ "prioritised": "prioritized",
1242
+ "prioritises": "prioritizes",
1243
+ "prioritising": "prioritizing",
1244
+ "privatisation": "privatization",
1245
+ "privatisations": "privatizations",
1246
+ "privatise": "privatize",
1247
+ "privatised": "privatized",
1248
+ "privatises": "privatizes",
1249
+ "privatising": "privatizing",
1250
+ "professionalisation": "professionalization",
1251
+ "professionalise": "professionalize",
1252
+ "professionalised": "professionalized",
1253
+ "professionalises": "professionalizes",
1254
+ "professionalising": "professionalizing",
1255
+ "programme": "program",
1256
+ "programmes": "programs",
1257
+ "prologue": "prolog",
1258
+ "prologues": "prologs",
1259
+ "propagandise": "propagandize",
1260
+ "propagandised": "propagandized",
1261
+ "propagandises": "propagandizes",
1262
+ "propagandising": "propagandizing",
1263
+ "proselytise": "proselytize",
1264
+ "proselytised": "proselytized",
1265
+ "proselytiser": "proselytizer",
1266
+ "proselytisers": "proselytizers",
1267
+ "proselytises": "proselytizes",
1268
+ "proselytising": "proselytizing",
1269
+ "psychoanalyse": "psychoanalyze",
1270
+ "psychoanalysed": "psychoanalyzed",
1271
+ "psychoanalyses": "psychoanalyzes",
1272
+ "psychoanalysing": "psychoanalyzing",
1273
+ "publicise": "publicize",
1274
+ "publicised": "publicized",
1275
+ "publicises": "publicizes",
1276
+ "publicising": "publicizing",
1277
+ "pulverisation": "pulverization",
1278
+ "pulverise": "pulverize",
1279
+ "pulverised": "pulverized",
1280
+ "pulverises": "pulverizes",
1281
+ "pulverising": "pulverizing",
1282
+ "pummelled": "pummel",
1283
+ "pummelling": "pummeled",
1284
+ "pyjama": "pajama",
1285
+ "pyjamas": "pajamas",
1286
+ "pzazz": "pizzazz",
1287
+ "quarrelled": "quarreled",
1288
+ "quarrelling": "quarreling",
1289
+ "radicalise": "radicalize",
1290
+ "radicalised": "radicalized",
1291
+ "radicalises": "radicalizes",
1292
+ "radicalising": "radicalizing",
1293
+ "rancour": "rancor",
1294
+ "randomise": "randomize",
1295
+ "randomised": "randomized",
1296
+ "randomises": "randomizes",
1297
+ "randomising": "randomizing",
1298
+ "rationalisation": "rationalization",
1299
+ "rationalisations": "rationalizations",
1300
+ "rationalise": "rationalize",
1301
+ "rationalised": "rationalized",
1302
+ "rationalises": "rationalizes",
1303
+ "rationalising": "rationalizing",
1304
+ "ravelled": "raveled",
1305
+ "ravelling": "raveling",
1306
+ "realisable": "realizable",
1307
+ "realisation": "realization",
1308
+ "realisations": "realizations",
1309
+ "realise": "realize",
1310
+ "realised": "realized",
1311
+ "realises": "realizes",
1312
+ "realising": "realizing",
1313
+ "recognisable": "recognizable",
1314
+ "recognisably": "recognizably",
1315
+ "recognisance": "recognizance",
1316
+ "recognise": "recognize",
1317
+ "recognised": "recognized",
1318
+ "recognises": "recognizes",
1319
+ "recognising": "recognizing",
1320
+ "reconnoitre": "reconnoiter",
1321
+ "reconnoitred": "reconnoitered",
1322
+ "reconnoitres": "reconnoiters",
1323
+ "reconnoitring": "reconnoitering",
1324
+ "refuelled": "refueled",
1325
+ "refuelling": "refueling",
1326
+ "regularisation": "regularization",
1327
+ "regularise": "regularize",
1328
+ "regularised": "regularized",
1329
+ "regularises": "regularizes",
1330
+ "regularising": "regularizing",
1331
+ "remodelled": "remodeled",
1332
+ "remodelling": "remodeling",
1333
+ "remould": "remold",
1334
+ "remoulded": "remolded",
1335
+ "remoulding": "remolding",
1336
+ "remoulds": "remolds",
1337
+ "reorganisation": "reorganization",
1338
+ "reorganisations": "reorganizations",
1339
+ "reorganise": "reorganize",
1340
+ "reorganised": "reorganized",
1341
+ "reorganises": "reorganizes",
1342
+ "reorganising": "reorganizing",
1343
+ "revelled": "reveled",
1344
+ "reveller": "reveler",
1345
+ "revellers": "revelers",
1346
+ "revelling": "reveling",
1347
+ "revitalise": "revitalize",
1348
+ "revitalised": "revitalized",
1349
+ "revitalises": "revitalizes",
1350
+ "revitalising": "revitalizing",
1351
+ "revolutionise": "revolutionize",
1352
+ "revolutionised": "revolutionized",
1353
+ "revolutionises": "revolutionizes",
1354
+ "revolutionising": "revolutionizing",
1355
+ "rhapsodise": "rhapsodize",
1356
+ "rhapsodised": "rhapsodized",
1357
+ "rhapsodises": "rhapsodizes",
1358
+ "rhapsodising": "rhapsodizing",
1359
+ "rigour": "rigor",
1360
+ "rigours": "rigors",
1361
+ "ritualised": "ritualized",
1362
+ "rivalled": "rivaled",
1363
+ "rivalling": "rivaling",
1364
+ "romanticise": "romanticize",
1365
+ "romanticised": "romanticized",
1366
+ "romanticises": "romanticizes",
1367
+ "romanticising": "romanticizing",
1368
+ "rumour": "rumor",
1369
+ "rumoured": "rumored",
1370
+ "rumours": "rumors",
1371
+ "sabre": "saber",
1372
+ "sabres": "sabers",
1373
+ "saltpetre": "saltpeter",
1374
+ "sanitise": "sanitize",
1375
+ "sanitised": "sanitized",
1376
+ "sanitises": "sanitizes",
1377
+ "sanitising": "sanitizing",
1378
+ "satirise": "satirize",
1379
+ "satirised": "satirized",
1380
+ "satirises": "satirizes",
1381
+ "satirising": "satirizing",
1382
+ "saviour": "savior",
1383
+ "saviours": "saviors",
1384
+ "savour": "savor",
1385
+ "savoured": "savored",
1386
+ "savouries": "savories",
1387
+ "savouring": "savoring",
1388
+ "savours": "savors",
1389
+ "savoury": "savory",
1390
+ "scandalise": "scandalize",
1391
+ "scandalised": "scandalized",
1392
+ "scandalises": "scandalizes",
1393
+ "scandalising": "scandalizing",
1394
+ "sceptic": "skeptic",
1395
+ "sceptical": "skeptical",
1396
+ "sceptically": "skeptically",
1397
+ "scepticism": "skepticism",
1398
+ "sceptics": "skeptics",
1399
+ "sceptre": "scepter",
1400
+ "sceptres": "scepters",
1401
+ "scrutinise": "scrutinize",
1402
+ "scrutinised": "scrutinized",
1403
+ "scrutinises": "scrutinizes",
1404
+ "scrutinising": "scrutinizing",
1405
+ "secularisation": "secularization",
1406
+ "secularise": "secularize",
1407
+ "secularised": "secularized",
1408
+ "secularises": "secularizes",
1409
+ "secularising": "secularizing",
1410
+ "sensationalise": "sensationalize",
1411
+ "sensationalised": "sensationalized",
1412
+ "sensationalises": "sensationalizes",
1413
+ "sensationalising": "sensationalizing",
1414
+ "sensitise": "sensitize",
1415
+ "sensitised": "sensitized",
1416
+ "sensitises": "sensitizes",
1417
+ "sensitising": "sensitizing",
1418
+ "sentimentalise": "sentimentalize",
1419
+ "sentimentalised": "sentimentalized",
1420
+ "sentimentalises": "sentimentalizes",
1421
+ "sentimentalising": "sentimentalizing",
1422
+ "sepulchre": "sepulcher",
1423
+ "sepulchres": "sepulchers",
1424
+ "serialisation": "serialization",
1425
+ "serialisations": "serializations",
1426
+ "serialise": "serialize",
1427
+ "serialised": "serialized",
1428
+ "serialises": "serializes",
1429
+ "serialising": "serializing",
1430
+ "sermonise": "sermonize",
1431
+ "sermonised": "sermonized",
1432
+ "sermonises": "sermonizes",
1433
+ "sermonising": "sermonizing",
1434
+ "sheikh": "sheik",
1435
+ "shovelled": "shoveled",
1436
+ "shovelling": "shoveling",
1437
+ "shrivelled": "shriveled",
1438
+ "shrivelling": "shriveling",
1439
+ "signalise": "signalize",
1440
+ "signalised": "signalized",
1441
+ "signalises": "signalizes",
1442
+ "signalising": "signalizing",
1443
+ "signalled": "signaled",
1444
+ "signalling": "signaling",
1445
+ "smoulder": "smolder",
1446
+ "smouldered": "smoldered",
1447
+ "smouldering": "smoldering",
1448
+ "smoulders": "smolders",
1449
+ "snivelled": "sniveled",
1450
+ "snivelling": "sniveling",
1451
+ "snorkelled": "snorkeled",
1452
+ "snorkelling": "snorkeling",
1453
+ "snowplough": "snowplow",
1454
+ "snowploughs": "snowplow",
1455
+ "socialisation": "socialization",
1456
+ "socialise": "socialize",
1457
+ "socialised": "socialized",
1458
+ "socialises": "socializes",
1459
+ "socialising": "socializing",
1460
+ "sodomise": "sodomize",
1461
+ "sodomised": "sodomized",
1462
+ "sodomises": "sodomizes",
1463
+ "sodomising": "sodomizing",
1464
+ "solemnise": "solemnize",
1465
+ "solemnised": "solemnized",
1466
+ "solemnises": "solemnizes",
1467
+ "solemnising": "solemnizing",
1468
+ "sombre": "somber",
1469
+ "specialisation": "specialization",
1470
+ "specialisations": "specializations",
1471
+ "specialise": "specialize",
1472
+ "specialised": "specialized",
1473
+ "specialises": "specializes",
1474
+ "specialising": "specializing",
1475
+ "spectre": "specter",
1476
+ "spectres": "specters",
1477
+ "spiralled": "spiraled",
1478
+ "spiralling": "spiraling",
1479
+ "splendour": "splendor",
1480
+ "splendours": "splendors",
1481
+ "squirrelled": "squirreled",
1482
+ "squirrelling": "squirreling",
1483
+ "stabilisation": "stabilization",
1484
+ "stabilise": "stabilize",
1485
+ "stabilised": "stabilized",
1486
+ "stabiliser": "stabilizer",
1487
+ "stabilisers": "stabilizers",
1488
+ "stabilises": "stabilizes",
1489
+ "stabilising": "stabilizing",
1490
+ "standardisation": "standardization",
1491
+ "standardise": "standardize",
1492
+ "standardised": "standardized",
1493
+ "standardises": "standardizes",
1494
+ "standardising": "standardizing",
1495
+ "stencilled": "stenciled",
1496
+ "stencilling": "stenciling",
1497
+ "sterilisation": "sterilization",
1498
+ "sterilisations": "sterilizations",
1499
+ "sterilise": "sterilize",
1500
+ "sterilised": "sterilized",
1501
+ "steriliser": "sterilizer",
1502
+ "sterilisers": "sterilizers",
1503
+ "sterilises": "sterilizes",
1504
+ "sterilising": "sterilizing",
1505
+ "stigmatisation": "stigmatization",
1506
+ "stigmatise": "stigmatize",
1507
+ "stigmatised": "stigmatized",
1508
+ "stigmatises": "stigmatizes",
1509
+ "stigmatising": "stigmatizing",
1510
+ "storey": "story",
1511
+ "storeys": "stories",
1512
+ "subsidisation": "subsidization",
1513
+ "subsidise": "subsidize",
1514
+ "subsidised": "subsidized",
1515
+ "subsidiser": "subsidizer",
1516
+ "subsidisers": "subsidizers",
1517
+ "subsidises": "subsidizes",
1518
+ "subsidising": "subsidizing",
1519
+ "succour": "succor",
1520
+ "succoured": "succored",
1521
+ "succouring": "succoring",
1522
+ "succours": "succors",
1523
+ "sulphate": "sulfate",
1524
+ "sulphates": "sulfates",
1525
+ "sulphide": "sulfide",
1526
+ "sulphides": "sulfides",
1527
+ "sulphur": "sulfur",
1528
+ "sulphurous": "sulfurous",
1529
+ "summarise": "summarize",
1530
+ "summarised": "summarized",
1531
+ "summarises": "summarizes",
1532
+ "summarising": "summarizing",
1533
+ "swivelled": "swiveled",
1534
+ "swivelling": "swiveling",
1535
+ "symbolise": "symbolize",
1536
+ "symbolised": "symbolized",
1537
+ "symbolises": "symbolizes",
1538
+ "symbolising": "symbolizing",
1539
+ "sympathise": "sympathize",
1540
+ "sympathised": "sympathized",
1541
+ "sympathiser": "sympathizer",
1542
+ "sympathisers": "sympathizers",
1543
+ "sympathises": "sympathizes",
1544
+ "sympathising": "sympathizing",
1545
+ "synchronisation": "synchronization",
1546
+ "synchronise": "synchronize",
1547
+ "synchronised": "synchronized",
1548
+ "synchronises": "synchronizes",
1549
+ "synchronising": "synchronizing",
1550
+ "synthesise": "synthesize",
1551
+ "synthesised": "synthesized",
1552
+ "synthesiser": "synthesizer",
1553
+ "synthesisers": "synthesizers",
1554
+ "synthesises": "synthesizes",
1555
+ "synthesising": "synthesizing",
1556
+ "syphon": "siphon",
1557
+ "syphoned": "siphoned",
1558
+ "syphoning": "siphoning",
1559
+ "syphons": "siphons",
1560
+ "systematisation": "systematization",
1561
+ "systematise": "systematize",
1562
+ "systematised": "systematized",
1563
+ "systematises": "systematizes",
1564
+ "systematising": "systematizing",
1565
+ "tantalise": "tantalize",
1566
+ "tantalised": "tantalized",
1567
+ "tantalises": "tantalizes",
1568
+ "tantalising": "tantalizing",
1569
+ "tantalisingly": "tantalizingly",
1570
+ "tasselled": "tasseled",
1571
+ "technicolour": "technicolor",
1572
+ "temporise": "temporize",
1573
+ "temporised": "temporized",
1574
+ "temporises": "temporizes",
1575
+ "temporising": "temporizing",
1576
+ "tenderise": "tenderize",
1577
+ "tenderised": "tenderized",
1578
+ "tenderises": "tenderizes",
1579
+ "tenderising": "tenderizing",
1580
+ "terrorise": "terrorize",
1581
+ "terrorised": "terrorized",
1582
+ "terrorises": "terrorizes",
1583
+ "terrorising": "terrorizing",
1584
+ "theatre": "theater",
1585
+ "theatregoer": "theatergoer",
1586
+ "theatregoers": "theatergoers",
1587
+ "theatres": "theaters",
1588
+ "theorise": "theorize",
1589
+ "theorised": "theorized",
1590
+ "theorises": "theorizes",
1591
+ "theorising": "theorizing",
1592
+ "tonne": "ton",
1593
+ "tonnes": "tons",
1594
+ "towelled": "toweled",
1595
+ "towelling": "toweling",
1596
+ "toxaemia": "toxemia",
1597
+ "tranquillise": "tranquilize",
1598
+ "tranquillised": "tranquilized",
1599
+ "tranquilliser": "tranquilizer",
1600
+ "tranquillisers": "tranquilizers",
1601
+ "tranquillises": "tranquilizes",
1602
+ "tranquillising": "tranquilizing",
1603
+ "tranquillity": "tranquility",
1604
+ "tranquillize": "tranquilize",
1605
+ "tranquillized": "tranquilized",
1606
+ "tranquillizer": "tranquilizer",
1607
+ "tranquillizers": "tranquilizers",
1608
+ "tranquillizes": "tranquilizes",
1609
+ "tranquillizing": "tranquilizing",
1610
+ "tranquilly": "tranquility",
1611
+ "transistorised": "transistorized",
1612
+ "traumatise": "traumatize",
1613
+ "traumatised": "traumatized",
1614
+ "traumatises": "traumatizes",
1615
+ "traumatising": "traumatizing",
1616
+ "travelled": "traveled",
1617
+ "traveller": "traveler",
1618
+ "travellers": "travelers",
1619
+ "travelling": "traveling",
1620
+ "travelog": "travelogue",
1621
+ "travelogs": "travelogues",
1622
+ "trialled": "trialed",
1623
+ "trialling": "trialing",
1624
+ "tricolour": "tricolor",
1625
+ "tricolours": "tricolors",
1626
+ "trivialise": "trivialize",
1627
+ "trivialised": "trivialized",
1628
+ "trivialises": "trivializes",
1629
+ "trivialising": "trivializing",
1630
+ "tumour": "tumor",
1631
+ "tumours": "tumors",
1632
+ "tunnelled": "tunneled",
1633
+ "tunnelling": "tunneling",
1634
+ "tyrannise": "tyrannize",
1635
+ "tyrannised": "tyrannized",
1636
+ "tyrannises": "tyrannizes",
1637
+ "tyrannising": "tyrannizing",
1638
+ "tyre": "tire",
1639
+ "tyres": "tires",
1640
+ "unauthorised": "unauthorized",
1641
+ "uncivilised": "uncivilized",
1642
+ "underutilised": "underutilized",
1643
+ "unequalled": "unequaled",
1644
+ "unfavourable": "unfavorable",
1645
+ "unfavourably": "unfavorably",
1646
+ "unionisation": "unionization",
1647
+ "unionise": "unionize",
1648
+ "unionised": "unionized",
1649
+ "unionises": "unionizes",
1650
+ "unionising": "unionizing",
1651
+ "unorganised": "unorganized",
1652
+ "unravelled": "unraveled",
1653
+ "unravelling": "unraveling",
1654
+ "unrecognisable": "unrecognizable",
1655
+ "unrecognised": "unrecognized",
1656
+ "unrivalled": "unrivaled",
1657
+ "unsavoury": "unsavory",
1658
+ "untrammelled": "untrammeled",
1659
+ "urbanisation": "urbanization",
1660
+ "urbanise": "urbanize",
1661
+ "urbanised": "urbanized",
1662
+ "urbanises": "urbanizes",
1663
+ "urbanising": "urbanizing",
1664
+ "utilisable": "utilizable",
1665
+ "utilisation": "utilization",
1666
+ "utilise": "utilize",
1667
+ "utilised": "utilized",
1668
+ "utilises": "utilizes",
1669
+ "utilising": "utilizing",
1670
+ "valour": "valor",
1671
+ "vandalise": "vandalize",
1672
+ "vandalised": "vandalized",
1673
+ "vandalises": "vandalizes",
1674
+ "vandalising": "vandalizing",
1675
+ "vaporisation": "vaporization",
1676
+ "vaporise": "vaporize",
1677
+ "vaporised": "vaporized",
1678
+ "vaporises": "vaporizes",
1679
+ "vaporising": "vaporizing",
1680
+ "vapour": "vapor",
1681
+ "vapours": "vapors",
1682
+ "verbalise": "verbalize",
1683
+ "verbalised": "verbalized",
1684
+ "verbalises": "verbalizes",
1685
+ "verbalising": "verbalizing",
1686
+ "victimisation": "victimization",
1687
+ "victimise": "victimize",
1688
+ "victimised": "victimized",
1689
+ "victimises": "victimizes",
1690
+ "victimising": "victimizing",
1691
+ "videodisc": "videodisk",
1692
+ "videodiscs": "videodisks",
1693
+ "vigour": "vigor",
1694
+ "visualisation": "visualization",
1695
+ "visualisations": "visualizations",
1696
+ "visualise": "visualize",
1697
+ "visualised": "visualized",
1698
+ "visualises": "visualizes",
1699
+ "visualising": "visualizing",
1700
+ "vocalisation": "vocalization",
1701
+ "vocalisations": "vocalizations",
1702
+ "vocalise": "vocalize",
1703
+ "vocalised": "vocalized",
1704
+ "vocalises": "vocalizes",
1705
+ "vocalising": "vocalizing",
1706
+ "vulcanised": "vulcanized",
1707
+ "vulgarisation": "vulgarization",
1708
+ "vulgarise": "vulgarize",
1709
+ "vulgarised": "vulgarized",
1710
+ "vulgarises": "vulgarizes",
1711
+ "vulgarising": "vulgarizing",
1712
+ "waggon": "wagon",
1713
+ "waggons": "wagons",
1714
+ "watercolour": "watercolor",
1715
+ "watercolours": "watercolors",
1716
+ "weaselled": "weaseled",
1717
+ "weaselling": "weaseling",
1718
+ "westernisation": "westernization",
1719
+ "westernise": "westernize",
1720
+ "westernised": "westernized",
1721
+ "westernises": "westernizes",
1722
+ "westernising": "westernizing",
1723
+ "womanise": "womanize",
1724
+ "womanised": "womanized",
1725
+ "womaniser": "womanizer",
1726
+ "womanisers": "womanizers",
1727
+ "womanises": "womanizes",
1728
+ "womanising": "womanizing",
1729
+ "woollen": "woolen",
1730
+ "woollens": "woolens",
1731
+ "woollies": "woolies",
1732
+ "woolly": "wooly",
1733
+ "worshipped": "worshiped",
1734
+ "worshipper": "worshiper",
1735
+ "worshipping": "worshiping",
1736
+ "yodelled": "yodeled",
1737
+ "yodelling": "yodeling",
1738
+ "yoghourt": "yogurt",
1739
+ "yoghourts": "yogurts",
1740
+ "yoghurt": "yogurt",
1741
+ "yoghurts": "yogurts"
1742
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "chunk_length": 30,
3
+ "feature_extractor_type": "WhisperFeatureExtractor",
4
+ "feature_size": 80,
5
+ "hop_length": 160,
6
+ "n_fft": 400,
7
+ "n_samples": 480000,
8
+ "nb_max_frames": 3000,
9
+ "padding_side": "right",
10
+ "padding_value": 0.0,
11
+ "processor_class": "WhisperProcessor",
12
+ "return_attention_mask": false,
13
+ "sampling_rate": 16000
14
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
trainer_state.json ADDED
@@ -0,0 +1,2985 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.18627458810806274,
3
+ "best_model_checkpoint": "/data1/CACHE/huggingface/hg_training/traing_whisper_base_9dataset_multi-gpu4-wnoise/checkpoint-38000",
4
+ "epoch": 6.846846846846847,
5
+ "eval_steps": 1000,
6
+ "global_step": 38000,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.02,
13
+ "grad_norm": 6.807682991027832,
14
+ "learning_rate": 1.9200000000000003e-06,
15
+ "loss": 1.579,
16
+ "step": 100
17
+ },
18
+ {
19
+ "epoch": 0.04,
20
+ "grad_norm": 4.137929916381836,
21
+ "learning_rate": 3.920000000000001e-06,
22
+ "loss": 0.6054,
23
+ "step": 200
24
+ },
25
+ {
26
+ "epoch": 0.05,
27
+ "grad_norm": 3.63185977935791,
28
+ "learning_rate": 5.92e-06,
29
+ "loss": 0.4771,
30
+ "step": 300
31
+ },
32
+ {
33
+ "epoch": 0.07,
34
+ "grad_norm": 4.2249555587768555,
35
+ "learning_rate": 7.92e-06,
36
+ "loss": 0.4225,
37
+ "step": 400
38
+ },
39
+ {
40
+ "epoch": 0.09,
41
+ "grad_norm": 4.122411251068115,
42
+ "learning_rate": 9.920000000000002e-06,
43
+ "loss": 0.387,
44
+ "step": 500
45
+ },
46
+ {
47
+ "epoch": 0.11,
48
+ "grad_norm": 3.319619655609131,
49
+ "learning_rate": 9.982545454545457e-06,
50
+ "loss": 0.3653,
51
+ "step": 600
52
+ },
53
+ {
54
+ "epoch": 0.13,
55
+ "grad_norm": 2.9246363639831543,
56
+ "learning_rate": 9.964363636363637e-06,
57
+ "loss": 0.3418,
58
+ "step": 700
59
+ },
60
+ {
61
+ "epoch": 0.14,
62
+ "grad_norm": 3.4514670372009277,
63
+ "learning_rate": 9.946181818181819e-06,
64
+ "loss": 0.3389,
65
+ "step": 800
66
+ },
67
+ {
68
+ "epoch": 0.16,
69
+ "grad_norm": 3.152733325958252,
70
+ "learning_rate": 9.928e-06,
71
+ "loss": 0.3159,
72
+ "step": 900
73
+ },
74
+ {
75
+ "epoch": 0.18,
76
+ "grad_norm": 2.9511702060699463,
77
+ "learning_rate": 9.909818181818182e-06,
78
+ "loss": 0.3191,
79
+ "step": 1000
80
+ },
81
+ {
82
+ "epoch": 0.18,
83
+ "eval_loss": 0.356253445148468,
84
+ "eval_runtime": 445.2731,
85
+ "eval_samples_per_second": 136.709,
86
+ "eval_steps_per_second": 4.274,
87
+ "step": 1000
88
+ },
89
+ {
90
+ "epoch": 0.2,
91
+ "grad_norm": 2.535557508468628,
92
+ "learning_rate": 9.891636363636364e-06,
93
+ "loss": 0.3103,
94
+ "step": 1100
95
+ },
96
+ {
97
+ "epoch": 0.22,
98
+ "grad_norm": 2.9129879474639893,
99
+ "learning_rate": 9.873454545454546e-06,
100
+ "loss": 0.3024,
101
+ "step": 1200
102
+ },
103
+ {
104
+ "epoch": 0.23,
105
+ "grad_norm": 3.5554208755493164,
106
+ "learning_rate": 9.855272727272728e-06,
107
+ "loss": 0.3021,
108
+ "step": 1300
109
+ },
110
+ {
111
+ "epoch": 0.25,
112
+ "grad_norm": 3.1638295650482178,
113
+ "learning_rate": 9.83709090909091e-06,
114
+ "loss": 0.2995,
115
+ "step": 1400
116
+ },
117
+ {
118
+ "epoch": 0.27,
119
+ "grad_norm": 3.0975465774536133,
120
+ "learning_rate": 9.818909090909092e-06,
121
+ "loss": 0.3014,
122
+ "step": 1500
123
+ },
124
+ {
125
+ "epoch": 0.29,
126
+ "grad_norm": 3.0966498851776123,
127
+ "learning_rate": 9.800727272727273e-06,
128
+ "loss": 0.2841,
129
+ "step": 1600
130
+ },
131
+ {
132
+ "epoch": 0.31,
133
+ "grad_norm": 2.612806558609009,
134
+ "learning_rate": 9.782545454545455e-06,
135
+ "loss": 0.2821,
136
+ "step": 1700
137
+ },
138
+ {
139
+ "epoch": 0.32,
140
+ "grad_norm": 3.115729808807373,
141
+ "learning_rate": 9.764363636363637e-06,
142
+ "loss": 0.2792,
143
+ "step": 1800
144
+ },
145
+ {
146
+ "epoch": 0.34,
147
+ "grad_norm": 2.9033608436584473,
148
+ "learning_rate": 9.746181818181819e-06,
149
+ "loss": 0.2835,
150
+ "step": 1900
151
+ },
152
+ {
153
+ "epoch": 0.36,
154
+ "grad_norm": 2.7176175117492676,
155
+ "learning_rate": 9.728e-06,
156
+ "loss": 0.2667,
157
+ "step": 2000
158
+ },
159
+ {
160
+ "epoch": 0.36,
161
+ "eval_loss": 0.30776357650756836,
162
+ "eval_runtime": 447.4332,
163
+ "eval_samples_per_second": 136.049,
164
+ "eval_steps_per_second": 4.253,
165
+ "step": 2000
166
+ },
167
+ {
168
+ "epoch": 0.38,
169
+ "grad_norm": 3.1798152923583984,
170
+ "learning_rate": 9.709818181818183e-06,
171
+ "loss": 0.2637,
172
+ "step": 2100
173
+ },
174
+ {
175
+ "epoch": 0.4,
176
+ "grad_norm": 2.6578774452209473,
177
+ "learning_rate": 9.691636363636364e-06,
178
+ "loss": 0.267,
179
+ "step": 2200
180
+ },
181
+ {
182
+ "epoch": 0.41,
183
+ "grad_norm": 2.983422040939331,
184
+ "learning_rate": 9.673636363636364e-06,
185
+ "loss": 0.2674,
186
+ "step": 2300
187
+ },
188
+ {
189
+ "epoch": 0.43,
190
+ "grad_norm": 2.7927327156066895,
191
+ "learning_rate": 9.655454545454547e-06,
192
+ "loss": 0.2575,
193
+ "step": 2400
194
+ },
195
+ {
196
+ "epoch": 0.45,
197
+ "grad_norm": 3.017789363861084,
198
+ "learning_rate": 9.63727272727273e-06,
199
+ "loss": 0.2635,
200
+ "step": 2500
201
+ },
202
+ {
203
+ "epoch": 0.47,
204
+ "grad_norm": 3.1174545288085938,
205
+ "learning_rate": 9.61909090909091e-06,
206
+ "loss": 0.2619,
207
+ "step": 2600
208
+ },
209
+ {
210
+ "epoch": 0.49,
211
+ "grad_norm": 3.1417720317840576,
212
+ "learning_rate": 9.600909090909091e-06,
213
+ "loss": 0.2612,
214
+ "step": 2700
215
+ },
216
+ {
217
+ "epoch": 0.5,
218
+ "grad_norm": 2.9511682987213135,
219
+ "learning_rate": 9.582727272727273e-06,
220
+ "loss": 0.2535,
221
+ "step": 2800
222
+ },
223
+ {
224
+ "epoch": 0.52,
225
+ "grad_norm": 2.670490026473999,
226
+ "learning_rate": 9.564545454545455e-06,
227
+ "loss": 0.2542,
228
+ "step": 2900
229
+ },
230
+ {
231
+ "epoch": 0.54,
232
+ "grad_norm": 2.8334949016571045,
233
+ "learning_rate": 9.546363636363637e-06,
234
+ "loss": 0.2448,
235
+ "step": 3000
236
+ },
237
+ {
238
+ "epoch": 0.54,
239
+ "eval_loss": 0.28384605050086975,
240
+ "eval_runtime": 363.3283,
241
+ "eval_samples_per_second": 167.543,
242
+ "eval_steps_per_second": 5.238,
243
+ "step": 3000
244
+ },
245
+ {
246
+ "epoch": 0.56,
247
+ "grad_norm": 3.076014757156372,
248
+ "learning_rate": 9.528181818181819e-06,
249
+ "loss": 0.2497,
250
+ "step": 3100
251
+ },
252
+ {
253
+ "epoch": 0.58,
254
+ "grad_norm": 2.6771583557128906,
255
+ "learning_rate": 9.51e-06,
256
+ "loss": 0.2474,
257
+ "step": 3200
258
+ },
259
+ {
260
+ "epoch": 0.59,
261
+ "grad_norm": 2.992527484893799,
262
+ "learning_rate": 9.491818181818182e-06,
263
+ "loss": 0.2452,
264
+ "step": 3300
265
+ },
266
+ {
267
+ "epoch": 0.61,
268
+ "grad_norm": 2.8110687732696533,
269
+ "learning_rate": 9.473636363636364e-06,
270
+ "loss": 0.2381,
271
+ "step": 3400
272
+ },
273
+ {
274
+ "epoch": 0.63,
275
+ "grad_norm": 2.5542688369750977,
276
+ "learning_rate": 9.455454545454546e-06,
277
+ "loss": 0.2436,
278
+ "step": 3500
279
+ },
280
+ {
281
+ "epoch": 0.65,
282
+ "grad_norm": 2.7978878021240234,
283
+ "learning_rate": 9.437272727272728e-06,
284
+ "loss": 0.2387,
285
+ "step": 3600
286
+ },
287
+ {
288
+ "epoch": 0.67,
289
+ "grad_norm": 3.2161848545074463,
290
+ "learning_rate": 9.41909090909091e-06,
291
+ "loss": 0.2421,
292
+ "step": 3700
293
+ },
294
+ {
295
+ "epoch": 0.68,
296
+ "grad_norm": 3.0258097648620605,
297
+ "learning_rate": 9.400909090909091e-06,
298
+ "loss": 0.2415,
299
+ "step": 3800
300
+ },
301
+ {
302
+ "epoch": 0.7,
303
+ "grad_norm": 2.86924409866333,
304
+ "learning_rate": 9.382727272727273e-06,
305
+ "loss": 0.2351,
306
+ "step": 3900
307
+ },
308
+ {
309
+ "epoch": 0.72,
310
+ "grad_norm": 2.6659717559814453,
311
+ "learning_rate": 9.364545454545455e-06,
312
+ "loss": 0.2322,
313
+ "step": 4000
314
+ },
315
+ {
316
+ "epoch": 0.72,
317
+ "eval_loss": 0.26763468980789185,
318
+ "eval_runtime": 459.6944,
319
+ "eval_samples_per_second": 132.421,
320
+ "eval_steps_per_second": 4.14,
321
+ "step": 4000
322
+ },
323
+ {
324
+ "epoch": 0.74,
325
+ "grad_norm": 2.9035747051239014,
326
+ "learning_rate": 9.346363636363637e-06,
327
+ "loss": 0.2311,
328
+ "step": 4100
329
+ },
330
+ {
331
+ "epoch": 0.76,
332
+ "grad_norm": 2.607459545135498,
333
+ "learning_rate": 9.328181818181819e-06,
334
+ "loss": 0.2356,
335
+ "step": 4200
336
+ },
337
+ {
338
+ "epoch": 0.77,
339
+ "grad_norm": 3.0524134635925293,
340
+ "learning_rate": 9.31e-06,
341
+ "loss": 0.2315,
342
+ "step": 4300
343
+ },
344
+ {
345
+ "epoch": 0.79,
346
+ "grad_norm": 3.0532467365264893,
347
+ "learning_rate": 9.291818181818182e-06,
348
+ "loss": 0.2275,
349
+ "step": 4400
350
+ },
351
+ {
352
+ "epoch": 0.81,
353
+ "grad_norm": 2.4372193813323975,
354
+ "learning_rate": 9.273636363636364e-06,
355
+ "loss": 0.2373,
356
+ "step": 4500
357
+ },
358
+ {
359
+ "epoch": 0.83,
360
+ "grad_norm": 2.6966984272003174,
361
+ "learning_rate": 9.255454545454546e-06,
362
+ "loss": 0.2235,
363
+ "step": 4600
364
+ },
365
+ {
366
+ "epoch": 0.85,
367
+ "grad_norm": 2.752675771713257,
368
+ "learning_rate": 9.237272727272728e-06,
369
+ "loss": 0.2298,
370
+ "step": 4700
371
+ },
372
+ {
373
+ "epoch": 0.86,
374
+ "grad_norm": 2.880847692489624,
375
+ "learning_rate": 9.21909090909091e-06,
376
+ "loss": 0.2268,
377
+ "step": 4800
378
+ },
379
+ {
380
+ "epoch": 0.88,
381
+ "grad_norm": 2.682913303375244,
382
+ "learning_rate": 9.200909090909092e-06,
383
+ "loss": 0.2224,
384
+ "step": 4900
385
+ },
386
+ {
387
+ "epoch": 0.9,
388
+ "grad_norm": 2.731718063354492,
389
+ "learning_rate": 9.182727272727274e-06,
390
+ "loss": 0.2171,
391
+ "step": 5000
392
+ },
393
+ {
394
+ "epoch": 0.9,
395
+ "eval_loss": 0.25470873713493347,
396
+ "eval_runtime": 486.5055,
397
+ "eval_samples_per_second": 125.123,
398
+ "eval_steps_per_second": 3.912,
399
+ "step": 5000
400
+ },
401
+ {
402
+ "epoch": 0.92,
403
+ "grad_norm": 3.1988534927368164,
404
+ "learning_rate": 9.164545454545455e-06,
405
+ "loss": 0.2215,
406
+ "step": 5100
407
+ },
408
+ {
409
+ "epoch": 0.94,
410
+ "grad_norm": 2.4781415462493896,
411
+ "learning_rate": 9.146363636363637e-06,
412
+ "loss": 0.217,
413
+ "step": 5200
414
+ },
415
+ {
416
+ "epoch": 0.95,
417
+ "grad_norm": 2.7429697513580322,
418
+ "learning_rate": 9.128181818181819e-06,
419
+ "loss": 0.2196,
420
+ "step": 5300
421
+ },
422
+ {
423
+ "epoch": 0.97,
424
+ "grad_norm": 2.868225336074829,
425
+ "learning_rate": 9.110000000000001e-06,
426
+ "loss": 0.2241,
427
+ "step": 5400
428
+ },
429
+ {
430
+ "epoch": 0.99,
431
+ "grad_norm": 2.566040277481079,
432
+ "learning_rate": 9.091818181818183e-06,
433
+ "loss": 0.2208,
434
+ "step": 5500
435
+ },
436
+ {
437
+ "epoch": 1.01,
438
+ "grad_norm": 2.6690568923950195,
439
+ "learning_rate": 9.073636363636365e-06,
440
+ "loss": 0.2082,
441
+ "step": 5600
442
+ },
443
+ {
444
+ "epoch": 1.03,
445
+ "grad_norm": 2.438952684402466,
446
+ "learning_rate": 9.055454545454546e-06,
447
+ "loss": 0.2009,
448
+ "step": 5700
449
+ },
450
+ {
451
+ "epoch": 1.05,
452
+ "grad_norm": 2.5286898612976074,
453
+ "learning_rate": 9.037272727272728e-06,
454
+ "loss": 0.2081,
455
+ "step": 5800
456
+ },
457
+ {
458
+ "epoch": 1.06,
459
+ "grad_norm": 2.2770423889160156,
460
+ "learning_rate": 9.01909090909091e-06,
461
+ "loss": 0.1966,
462
+ "step": 5900
463
+ },
464
+ {
465
+ "epoch": 1.08,
466
+ "grad_norm": 2.502218723297119,
467
+ "learning_rate": 9.000909090909092e-06,
468
+ "loss": 0.2037,
469
+ "step": 6000
470
+ },
471
+ {
472
+ "epoch": 1.08,
473
+ "eval_loss": 0.24770112335681915,
474
+ "eval_runtime": 501.2949,
475
+ "eval_samples_per_second": 121.432,
476
+ "eval_steps_per_second": 3.796,
477
+ "step": 6000
478
+ },
479
+ {
480
+ "epoch": 1.1,
481
+ "grad_norm": 2.348766565322876,
482
+ "learning_rate": 8.982727272727274e-06,
483
+ "loss": 0.1958,
484
+ "step": 6100
485
+ },
486
+ {
487
+ "epoch": 1.12,
488
+ "grad_norm": 2.7815017700195312,
489
+ "learning_rate": 8.964545454545456e-06,
490
+ "loss": 0.1943,
491
+ "step": 6200
492
+ },
493
+ {
494
+ "epoch": 1.14,
495
+ "grad_norm": 2.151355266571045,
496
+ "learning_rate": 8.946363636363637e-06,
497
+ "loss": 0.1911,
498
+ "step": 6300
499
+ },
500
+ {
501
+ "epoch": 1.15,
502
+ "grad_norm": 2.5474178791046143,
503
+ "learning_rate": 8.92818181818182e-06,
504
+ "loss": 0.1988,
505
+ "step": 6400
506
+ },
507
+ {
508
+ "epoch": 1.17,
509
+ "grad_norm": 2.5478312969207764,
510
+ "learning_rate": 8.910181818181819e-06,
511
+ "loss": 0.197,
512
+ "step": 6500
513
+ },
514
+ {
515
+ "epoch": 1.19,
516
+ "grad_norm": 2.358614683151245,
517
+ "learning_rate": 8.892e-06,
518
+ "loss": 0.1918,
519
+ "step": 6600
520
+ },
521
+ {
522
+ "epoch": 1.21,
523
+ "grad_norm": 3.1825013160705566,
524
+ "learning_rate": 8.873818181818182e-06,
525
+ "loss": 0.1976,
526
+ "step": 6700
527
+ },
528
+ {
529
+ "epoch": 1.23,
530
+ "grad_norm": 2.506746530532837,
531
+ "learning_rate": 8.855636363636364e-06,
532
+ "loss": 0.1954,
533
+ "step": 6800
534
+ },
535
+ {
536
+ "epoch": 1.24,
537
+ "grad_norm": 2.5737404823303223,
538
+ "learning_rate": 8.837454545454546e-06,
539
+ "loss": 0.1958,
540
+ "step": 6900
541
+ },
542
+ {
543
+ "epoch": 1.26,
544
+ "grad_norm": 3.3842029571533203,
545
+ "learning_rate": 8.819272727272728e-06,
546
+ "loss": 0.1913,
547
+ "step": 7000
548
+ },
549
+ {
550
+ "epoch": 1.26,
551
+ "eval_loss": 0.23902775347232819,
552
+ "eval_runtime": 354.6913,
553
+ "eval_samples_per_second": 171.622,
554
+ "eval_steps_per_second": 5.365,
555
+ "step": 7000
556
+ },
557
+ {
558
+ "epoch": 1.28,
559
+ "grad_norm": 2.24104642868042,
560
+ "learning_rate": 8.80109090909091e-06,
561
+ "loss": 0.1957,
562
+ "step": 7100
563
+ },
564
+ {
565
+ "epoch": 1.3,
566
+ "grad_norm": 2.8391642570495605,
567
+ "learning_rate": 8.782909090909092e-06,
568
+ "loss": 0.1987,
569
+ "step": 7200
570
+ },
571
+ {
572
+ "epoch": 1.32,
573
+ "grad_norm": 2.0376856327056885,
574
+ "learning_rate": 8.764727272727273e-06,
575
+ "loss": 0.1868,
576
+ "step": 7300
577
+ },
578
+ {
579
+ "epoch": 1.33,
580
+ "grad_norm": 3.101025104522705,
581
+ "learning_rate": 8.746545454545455e-06,
582
+ "loss": 0.188,
583
+ "step": 7400
584
+ },
585
+ {
586
+ "epoch": 1.35,
587
+ "grad_norm": 2.6974034309387207,
588
+ "learning_rate": 8.728363636363637e-06,
589
+ "loss": 0.1945,
590
+ "step": 7500
591
+ },
592
+ {
593
+ "epoch": 1.37,
594
+ "grad_norm": 2.4040937423706055,
595
+ "learning_rate": 8.710181818181819e-06,
596
+ "loss": 0.1914,
597
+ "step": 7600
598
+ },
599
+ {
600
+ "epoch": 1.39,
601
+ "grad_norm": 2.8795206546783447,
602
+ "learning_rate": 8.692e-06,
603
+ "loss": 0.1982,
604
+ "step": 7700
605
+ },
606
+ {
607
+ "epoch": 1.41,
608
+ "grad_norm": 2.352360486984253,
609
+ "learning_rate": 8.673818181818183e-06,
610
+ "loss": 0.1867,
611
+ "step": 7800
612
+ },
613
+ {
614
+ "epoch": 1.42,
615
+ "grad_norm": 2.6391663551330566,
616
+ "learning_rate": 8.655636363636364e-06,
617
+ "loss": 0.1822,
618
+ "step": 7900
619
+ },
620
+ {
621
+ "epoch": 1.44,
622
+ "grad_norm": 2.915161609649658,
623
+ "learning_rate": 8.637454545454546e-06,
624
+ "loss": 0.1903,
625
+ "step": 8000
626
+ },
627
+ {
628
+ "epoch": 1.44,
629
+ "eval_loss": 0.23261629045009613,
630
+ "eval_runtime": 492.9612,
631
+ "eval_samples_per_second": 123.484,
632
+ "eval_steps_per_second": 3.86,
633
+ "step": 8000
634
+ },
635
+ {
636
+ "epoch": 1.46,
637
+ "grad_norm": 2.6614203453063965,
638
+ "learning_rate": 8.619272727272728e-06,
639
+ "loss": 0.1862,
640
+ "step": 8100
641
+ },
642
+ {
643
+ "epoch": 1.48,
644
+ "grad_norm": 2.8267104625701904,
645
+ "learning_rate": 8.60109090909091e-06,
646
+ "loss": 0.1848,
647
+ "step": 8200
648
+ },
649
+ {
650
+ "epoch": 1.5,
651
+ "grad_norm": 2.4803242683410645,
652
+ "learning_rate": 8.582909090909092e-06,
653
+ "loss": 0.1858,
654
+ "step": 8300
655
+ },
656
+ {
657
+ "epoch": 1.51,
658
+ "grad_norm": 3.031543254852295,
659
+ "learning_rate": 8.564727272727274e-06,
660
+ "loss": 0.1816,
661
+ "step": 8400
662
+ },
663
+ {
664
+ "epoch": 1.53,
665
+ "grad_norm": 2.55397629737854,
666
+ "learning_rate": 8.546545454545456e-06,
667
+ "loss": 0.1896,
668
+ "step": 8500
669
+ },
670
+ {
671
+ "epoch": 1.55,
672
+ "grad_norm": 2.7617721557617188,
673
+ "learning_rate": 8.528363636363637e-06,
674
+ "loss": 0.1823,
675
+ "step": 8600
676
+ },
677
+ {
678
+ "epoch": 1.57,
679
+ "grad_norm": 2.09851336479187,
680
+ "learning_rate": 8.510363636363637e-06,
681
+ "loss": 0.1811,
682
+ "step": 8700
683
+ },
684
+ {
685
+ "epoch": 1.59,
686
+ "grad_norm": 2.137387990951538,
687
+ "learning_rate": 8.492363636363638e-06,
688
+ "loss": 0.183,
689
+ "step": 8800
690
+ },
691
+ {
692
+ "epoch": 1.6,
693
+ "grad_norm": 2.270770788192749,
694
+ "learning_rate": 8.47418181818182e-06,
695
+ "loss": 0.1918,
696
+ "step": 8900
697
+ },
698
+ {
699
+ "epoch": 1.62,
700
+ "grad_norm": 2.41756010055542,
701
+ "learning_rate": 8.456000000000002e-06,
702
+ "loss": 0.1829,
703
+ "step": 9000
704
+ },
705
+ {
706
+ "epoch": 1.62,
707
+ "eval_loss": 0.22808903455734253,
708
+ "eval_runtime": 666.6262,
709
+ "eval_samples_per_second": 91.315,
710
+ "eval_steps_per_second": 2.855,
711
+ "step": 9000
712
+ },
713
+ {
714
+ "epoch": 1.64,
715
+ "grad_norm": 2.676539421081543,
716
+ "learning_rate": 8.437818181818182e-06,
717
+ "loss": 0.1869,
718
+ "step": 9100
719
+ },
720
+ {
721
+ "epoch": 1.66,
722
+ "grad_norm": 2.636401414871216,
723
+ "learning_rate": 8.419636363636364e-06,
724
+ "loss": 0.1847,
725
+ "step": 9200
726
+ },
727
+ {
728
+ "epoch": 1.68,
729
+ "grad_norm": 3.2688345909118652,
730
+ "learning_rate": 8.401454545454546e-06,
731
+ "loss": 0.1864,
732
+ "step": 9300
733
+ },
734
+ {
735
+ "epoch": 1.69,
736
+ "grad_norm": 2.5359675884246826,
737
+ "learning_rate": 8.383272727272727e-06,
738
+ "loss": 0.1824,
739
+ "step": 9400
740
+ },
741
+ {
742
+ "epoch": 1.71,
743
+ "grad_norm": 2.2516977787017822,
744
+ "learning_rate": 8.36509090909091e-06,
745
+ "loss": 0.1834,
746
+ "step": 9500
747
+ },
748
+ {
749
+ "epoch": 1.73,
750
+ "grad_norm": 3.052272081375122,
751
+ "learning_rate": 8.346909090909091e-06,
752
+ "loss": 0.1746,
753
+ "step": 9600
754
+ },
755
+ {
756
+ "epoch": 1.75,
757
+ "grad_norm": 3.142702579498291,
758
+ "learning_rate": 8.328727272727275e-06,
759
+ "loss": 0.1887,
760
+ "step": 9700
761
+ },
762
+ {
763
+ "epoch": 1.77,
764
+ "grad_norm": 2.60490345954895,
765
+ "learning_rate": 8.310545454545456e-06,
766
+ "loss": 0.1799,
767
+ "step": 9800
768
+ },
769
+ {
770
+ "epoch": 1.78,
771
+ "grad_norm": 2.252636432647705,
772
+ "learning_rate": 8.292363636363637e-06,
773
+ "loss": 0.1721,
774
+ "step": 9900
775
+ },
776
+ {
777
+ "epoch": 1.8,
778
+ "grad_norm": 2.509241819381714,
779
+ "learning_rate": 8.274181818181818e-06,
780
+ "loss": 0.1822,
781
+ "step": 10000
782
+ },
783
+ {
784
+ "epoch": 1.8,
785
+ "eval_loss": 0.22305500507354736,
786
+ "eval_runtime": 699.489,
787
+ "eval_samples_per_second": 87.025,
788
+ "eval_steps_per_second": 2.721,
789
+ "step": 10000
790
+ },
791
+ {
792
+ "epoch": 1.82,
793
+ "grad_norm": 2.6383330821990967,
794
+ "learning_rate": 8.256e-06,
795
+ "loss": 0.1838,
796
+ "step": 10100
797
+ },
798
+ {
799
+ "epoch": 1.84,
800
+ "grad_norm": 2.926187753677368,
801
+ "learning_rate": 8.237818181818182e-06,
802
+ "loss": 0.1793,
803
+ "step": 10200
804
+ },
805
+ {
806
+ "epoch": 1.86,
807
+ "grad_norm": 2.827836036682129,
808
+ "learning_rate": 8.219636363636364e-06,
809
+ "loss": 0.174,
810
+ "step": 10300
811
+ },
812
+ {
813
+ "epoch": 1.87,
814
+ "grad_norm": 2.2314600944519043,
815
+ "learning_rate": 8.201454545454546e-06,
816
+ "loss": 0.1763,
817
+ "step": 10400
818
+ },
819
+ {
820
+ "epoch": 1.89,
821
+ "grad_norm": 2.6388306617736816,
822
+ "learning_rate": 8.183272727272728e-06,
823
+ "loss": 0.1801,
824
+ "step": 10500
825
+ },
826
+ {
827
+ "epoch": 1.91,
828
+ "grad_norm": 2.648263931274414,
829
+ "learning_rate": 8.165090909090911e-06,
830
+ "loss": 0.1775,
831
+ "step": 10600
832
+ },
833
+ {
834
+ "epoch": 1.93,
835
+ "grad_norm": 2.5469634532928467,
836
+ "learning_rate": 8.146909090909091e-06,
837
+ "loss": 0.1769,
838
+ "step": 10700
839
+ },
840
+ {
841
+ "epoch": 1.95,
842
+ "grad_norm": 2.1157877445220947,
843
+ "learning_rate": 8.128727272727273e-06,
844
+ "loss": 0.1833,
845
+ "step": 10800
846
+ },
847
+ {
848
+ "epoch": 1.96,
849
+ "grad_norm": 2.086653709411621,
850
+ "learning_rate": 8.110545454545455e-06,
851
+ "loss": 0.1787,
852
+ "step": 10900
853
+ },
854
+ {
855
+ "epoch": 1.98,
856
+ "grad_norm": 2.8866491317749023,
857
+ "learning_rate": 8.092363636363637e-06,
858
+ "loss": 0.1729,
859
+ "step": 11000
860
+ },
861
+ {
862
+ "epoch": 1.98,
863
+ "eval_loss": 0.2171076089143753,
864
+ "eval_runtime": 564.1273,
865
+ "eval_samples_per_second": 107.906,
866
+ "eval_steps_per_second": 3.373,
867
+ "step": 11000
868
+ },
869
+ {
870
+ "epoch": 2.0,
871
+ "grad_norm": 2.083310604095459,
872
+ "learning_rate": 8.074181818181819e-06,
873
+ "loss": 0.1774,
874
+ "step": 11100
875
+ },
876
+ {
877
+ "epoch": 2.02,
878
+ "grad_norm": 2.184401750564575,
879
+ "learning_rate": 8.056e-06,
880
+ "loss": 0.1616,
881
+ "step": 11200
882
+ },
883
+ {
884
+ "epoch": 2.04,
885
+ "grad_norm": 2.4366555213928223,
886
+ "learning_rate": 8.037818181818182e-06,
887
+ "loss": 0.1633,
888
+ "step": 11300
889
+ },
890
+ {
891
+ "epoch": 2.05,
892
+ "grad_norm": 2.246950626373291,
893
+ "learning_rate": 8.019636363636364e-06,
894
+ "loss": 0.1623,
895
+ "step": 11400
896
+ },
897
+ {
898
+ "epoch": 2.07,
899
+ "grad_norm": 2.4729185104370117,
900
+ "learning_rate": 8.001454545454546e-06,
901
+ "loss": 0.1544,
902
+ "step": 11500
903
+ },
904
+ {
905
+ "epoch": 2.09,
906
+ "grad_norm": 2.3734850883483887,
907
+ "learning_rate": 7.983272727272728e-06,
908
+ "loss": 0.1595,
909
+ "step": 11600
910
+ },
911
+ {
912
+ "epoch": 2.11,
913
+ "grad_norm": 2.2376785278320312,
914
+ "learning_rate": 7.96509090909091e-06,
915
+ "loss": 0.1561,
916
+ "step": 11700
917
+ },
918
+ {
919
+ "epoch": 2.13,
920
+ "grad_norm": 2.2780098915100098,
921
+ "learning_rate": 7.946909090909091e-06,
922
+ "loss": 0.1549,
923
+ "step": 11800
924
+ },
925
+ {
926
+ "epoch": 2.14,
927
+ "grad_norm": 2.2681996822357178,
928
+ "learning_rate": 7.928727272727273e-06,
929
+ "loss": 0.1641,
930
+ "step": 11900
931
+ },
932
+ {
933
+ "epoch": 2.16,
934
+ "grad_norm": 2.3715972900390625,
935
+ "learning_rate": 7.910545454545455e-06,
936
+ "loss": 0.1611,
937
+ "step": 12000
938
+ },
939
+ {
940
+ "epoch": 2.16,
941
+ "eval_loss": 0.2156703770160675,
942
+ "eval_runtime": 477.7675,
943
+ "eval_samples_per_second": 127.411,
944
+ "eval_steps_per_second": 3.983,
945
+ "step": 12000
946
+ },
947
+ {
948
+ "epoch": 2.18,
949
+ "grad_norm": 1.9450706243515015,
950
+ "learning_rate": 7.892363636363637e-06,
951
+ "loss": 0.152,
952
+ "step": 12100
953
+ },
954
+ {
955
+ "epoch": 2.2,
956
+ "grad_norm": 2.151553153991699,
957
+ "learning_rate": 7.874181818181819e-06,
958
+ "loss": 0.1564,
959
+ "step": 12200
960
+ },
961
+ {
962
+ "epoch": 2.22,
963
+ "grad_norm": 2.2513058185577393,
964
+ "learning_rate": 7.856e-06,
965
+ "loss": 0.154,
966
+ "step": 12300
967
+ },
968
+ {
969
+ "epoch": 2.23,
970
+ "grad_norm": 2.370582342147827,
971
+ "learning_rate": 7.837818181818183e-06,
972
+ "loss": 0.1491,
973
+ "step": 12400
974
+ },
975
+ {
976
+ "epoch": 2.25,
977
+ "grad_norm": 2.598097324371338,
978
+ "learning_rate": 7.819636363636364e-06,
979
+ "loss": 0.1579,
980
+ "step": 12500
981
+ },
982
+ {
983
+ "epoch": 2.27,
984
+ "grad_norm": 2.6642096042633057,
985
+ "learning_rate": 7.801636363636364e-06,
986
+ "loss": 0.1576,
987
+ "step": 12600
988
+ },
989
+ {
990
+ "epoch": 2.29,
991
+ "grad_norm": 2.4024887084960938,
992
+ "learning_rate": 7.783454545454546e-06,
993
+ "loss": 0.1599,
994
+ "step": 12700
995
+ },
996
+ {
997
+ "epoch": 2.31,
998
+ "grad_norm": 2.2930634021759033,
999
+ "learning_rate": 7.765272727272728e-06,
1000
+ "loss": 0.1538,
1001
+ "step": 12800
1002
+ },
1003
+ {
1004
+ "epoch": 2.32,
1005
+ "grad_norm": 2.1747636795043945,
1006
+ "learning_rate": 7.74709090909091e-06,
1007
+ "loss": 0.1598,
1008
+ "step": 12900
1009
+ },
1010
+ {
1011
+ "epoch": 2.34,
1012
+ "grad_norm": 2.725010395050049,
1013
+ "learning_rate": 7.728909090909091e-06,
1014
+ "loss": 0.1509,
1015
+ "step": 13000
1016
+ },
1017
+ {
1018
+ "epoch": 2.34,
1019
+ "eval_loss": 0.21198728680610657,
1020
+ "eval_runtime": 596.3747,
1021
+ "eval_samples_per_second": 102.072,
1022
+ "eval_steps_per_second": 3.191,
1023
+ "step": 13000
1024
+ },
1025
+ {
1026
+ "epoch": 2.36,
1027
+ "grad_norm": 2.2646334171295166,
1028
+ "learning_rate": 7.710727272727273e-06,
1029
+ "loss": 0.1569,
1030
+ "step": 13100
1031
+ },
1032
+ {
1033
+ "epoch": 2.38,
1034
+ "grad_norm": 2.8910107612609863,
1035
+ "learning_rate": 7.692545454545455e-06,
1036
+ "loss": 0.161,
1037
+ "step": 13200
1038
+ },
1039
+ {
1040
+ "epoch": 2.4,
1041
+ "grad_norm": 2.7823307514190674,
1042
+ "learning_rate": 7.674363636363637e-06,
1043
+ "loss": 0.1642,
1044
+ "step": 13300
1045
+ },
1046
+ {
1047
+ "epoch": 2.41,
1048
+ "grad_norm": 3.888598918914795,
1049
+ "learning_rate": 7.656181818181819e-06,
1050
+ "loss": 0.1565,
1051
+ "step": 13400
1052
+ },
1053
+ {
1054
+ "epoch": 2.43,
1055
+ "grad_norm": 2.2702815532684326,
1056
+ "learning_rate": 7.638e-06,
1057
+ "loss": 0.1546,
1058
+ "step": 13500
1059
+ },
1060
+ {
1061
+ "epoch": 2.45,
1062
+ "grad_norm": 2.49238920211792,
1063
+ "learning_rate": 7.619818181818183e-06,
1064
+ "loss": 0.1562,
1065
+ "step": 13600
1066
+ },
1067
+ {
1068
+ "epoch": 2.47,
1069
+ "grad_norm": 2.2743749618530273,
1070
+ "learning_rate": 7.601636363636364e-06,
1071
+ "loss": 0.1611,
1072
+ "step": 13700
1073
+ },
1074
+ {
1075
+ "epoch": 2.49,
1076
+ "grad_norm": 2.550445318222046,
1077
+ "learning_rate": 7.583454545454546e-06,
1078
+ "loss": 0.1569,
1079
+ "step": 13800
1080
+ },
1081
+ {
1082
+ "epoch": 2.5,
1083
+ "grad_norm": 1.858296513557434,
1084
+ "learning_rate": 7.565272727272728e-06,
1085
+ "loss": 0.1642,
1086
+ "step": 13900
1087
+ },
1088
+ {
1089
+ "epoch": 2.52,
1090
+ "grad_norm": 2.714526891708374,
1091
+ "learning_rate": 7.54709090909091e-06,
1092
+ "loss": 0.1552,
1093
+ "step": 14000
1094
+ },
1095
+ {
1096
+ "epoch": 2.52,
1097
+ "eval_loss": 0.20844437181949615,
1098
+ "eval_runtime": 567.6604,
1099
+ "eval_samples_per_second": 107.235,
1100
+ "eval_steps_per_second": 3.352,
1101
+ "step": 14000
1102
+ },
1103
+ {
1104
+ "epoch": 2.54,
1105
+ "grad_norm": 2.246244430541992,
1106
+ "learning_rate": 7.528909090909091e-06,
1107
+ "loss": 0.1545,
1108
+ "step": 14100
1109
+ },
1110
+ {
1111
+ "epoch": 2.56,
1112
+ "grad_norm": 2.7097859382629395,
1113
+ "learning_rate": 7.510727272727273e-06,
1114
+ "loss": 0.1547,
1115
+ "step": 14200
1116
+ },
1117
+ {
1118
+ "epoch": 2.58,
1119
+ "grad_norm": 3.1044564247131348,
1120
+ "learning_rate": 7.492545454545456e-06,
1121
+ "loss": 0.1561,
1122
+ "step": 14300
1123
+ },
1124
+ {
1125
+ "epoch": 2.59,
1126
+ "grad_norm": 3.426424741744995,
1127
+ "learning_rate": 7.474363636363638e-06,
1128
+ "loss": 0.1545,
1129
+ "step": 14400
1130
+ },
1131
+ {
1132
+ "epoch": 2.61,
1133
+ "grad_norm": 2.8595056533813477,
1134
+ "learning_rate": 7.456181818181819e-06,
1135
+ "loss": 0.157,
1136
+ "step": 14500
1137
+ },
1138
+ {
1139
+ "epoch": 2.63,
1140
+ "grad_norm": 2.094259023666382,
1141
+ "learning_rate": 7.438000000000001e-06,
1142
+ "loss": 0.1582,
1143
+ "step": 14600
1144
+ },
1145
+ {
1146
+ "epoch": 2.65,
1147
+ "grad_norm": 2.1589813232421875,
1148
+ "learning_rate": 7.4198181818181825e-06,
1149
+ "loss": 0.1608,
1150
+ "step": 14700
1151
+ },
1152
+ {
1153
+ "epoch": 2.67,
1154
+ "grad_norm": 2.2940006256103516,
1155
+ "learning_rate": 7.401636363636364e-06,
1156
+ "loss": 0.1468,
1157
+ "step": 14800
1158
+ },
1159
+ {
1160
+ "epoch": 2.68,
1161
+ "grad_norm": 2.0454187393188477,
1162
+ "learning_rate": 7.383454545454546e-06,
1163
+ "loss": 0.1535,
1164
+ "step": 14900
1165
+ },
1166
+ {
1167
+ "epoch": 2.7,
1168
+ "grad_norm": 2.362870693206787,
1169
+ "learning_rate": 7.365272727272728e-06,
1170
+ "loss": 0.1487,
1171
+ "step": 15000
1172
+ },
1173
+ {
1174
+ "epoch": 2.7,
1175
+ "eval_loss": 0.20696710050106049,
1176
+ "eval_runtime": 550.539,
1177
+ "eval_samples_per_second": 110.57,
1178
+ "eval_steps_per_second": 3.457,
1179
+ "step": 15000
1180
+ },
1181
+ {
1182
+ "epoch": 2.72,
1183
+ "grad_norm": 2.293924331665039,
1184
+ "learning_rate": 7.347090909090909e-06,
1185
+ "loss": 0.1565,
1186
+ "step": 15100
1187
+ },
1188
+ {
1189
+ "epoch": 2.74,
1190
+ "grad_norm": 2.045642614364624,
1191
+ "learning_rate": 7.328909090909091e-06,
1192
+ "loss": 0.1545,
1193
+ "step": 15200
1194
+ },
1195
+ {
1196
+ "epoch": 2.76,
1197
+ "grad_norm": 2.3518779277801514,
1198
+ "learning_rate": 7.3107272727272735e-06,
1199
+ "loss": 0.1581,
1200
+ "step": 15300
1201
+ },
1202
+ {
1203
+ "epoch": 2.77,
1204
+ "grad_norm": 2.111579418182373,
1205
+ "learning_rate": 7.292545454545455e-06,
1206
+ "loss": 0.1578,
1207
+ "step": 15400
1208
+ },
1209
+ {
1210
+ "epoch": 2.79,
1211
+ "grad_norm": 2.008013963699341,
1212
+ "learning_rate": 7.274363636363637e-06,
1213
+ "loss": 0.1459,
1214
+ "step": 15500
1215
+ },
1216
+ {
1217
+ "epoch": 2.81,
1218
+ "grad_norm": 2.2898120880126953,
1219
+ "learning_rate": 7.256181818181819e-06,
1220
+ "loss": 0.1543,
1221
+ "step": 15600
1222
+ },
1223
+ {
1224
+ "epoch": 2.83,
1225
+ "grad_norm": 2.504873752593994,
1226
+ "learning_rate": 7.238000000000001e-06,
1227
+ "loss": 0.1485,
1228
+ "step": 15700
1229
+ },
1230
+ {
1231
+ "epoch": 2.85,
1232
+ "grad_norm": 2.294981002807617,
1233
+ "learning_rate": 7.219818181818183e-06,
1234
+ "loss": 0.1542,
1235
+ "step": 15800
1236
+ },
1237
+ {
1238
+ "epoch": 2.86,
1239
+ "grad_norm": 2.4189417362213135,
1240
+ "learning_rate": 7.201636363636364e-06,
1241
+ "loss": 0.1523,
1242
+ "step": 15900
1243
+ },
1244
+ {
1245
+ "epoch": 2.88,
1246
+ "grad_norm": 2.0170607566833496,
1247
+ "learning_rate": 7.1834545454545455e-06,
1248
+ "loss": 0.1492,
1249
+ "step": 16000
1250
+ },
1251
+ {
1252
+ "epoch": 2.88,
1253
+ "eval_loss": 0.20441067218780518,
1254
+ "eval_runtime": 568.7903,
1255
+ "eval_samples_per_second": 107.022,
1256
+ "eval_steps_per_second": 3.346,
1257
+ "step": 16000
1258
+ },
1259
+ {
1260
+ "epoch": 2.9,
1261
+ "grad_norm": 2.5287556648254395,
1262
+ "learning_rate": 7.165272727272727e-06,
1263
+ "loss": 0.1519,
1264
+ "step": 16100
1265
+ },
1266
+ {
1267
+ "epoch": 2.92,
1268
+ "grad_norm": 2.236844301223755,
1269
+ "learning_rate": 7.1472727272727285e-06,
1270
+ "loss": 0.1519,
1271
+ "step": 16200
1272
+ },
1273
+ {
1274
+ "epoch": 2.94,
1275
+ "grad_norm": 2.3229405879974365,
1276
+ "learning_rate": 7.12909090909091e-06,
1277
+ "loss": 0.1561,
1278
+ "step": 16300
1279
+ },
1280
+ {
1281
+ "epoch": 2.95,
1282
+ "grad_norm": 2.1688249111175537,
1283
+ "learning_rate": 7.110909090909091e-06,
1284
+ "loss": 0.1471,
1285
+ "step": 16400
1286
+ },
1287
+ {
1288
+ "epoch": 2.97,
1289
+ "grad_norm": 2.4697909355163574,
1290
+ "learning_rate": 7.092727272727273e-06,
1291
+ "loss": 0.1538,
1292
+ "step": 16500
1293
+ },
1294
+ {
1295
+ "epoch": 2.99,
1296
+ "grad_norm": 2.3912057876586914,
1297
+ "learning_rate": 7.074545454545455e-06,
1298
+ "loss": 0.1607,
1299
+ "step": 16600
1300
+ },
1301
+ {
1302
+ "epoch": 3.01,
1303
+ "grad_norm": 3.0056750774383545,
1304
+ "learning_rate": 7.056363636363637e-06,
1305
+ "loss": 0.1468,
1306
+ "step": 16700
1307
+ },
1308
+ {
1309
+ "epoch": 3.03,
1310
+ "grad_norm": 2.248894691467285,
1311
+ "learning_rate": 7.038181818181819e-06,
1312
+ "loss": 0.139,
1313
+ "step": 16800
1314
+ },
1315
+ {
1316
+ "epoch": 3.05,
1317
+ "grad_norm": 2.268486976623535,
1318
+ "learning_rate": 7.0200000000000006e-06,
1319
+ "loss": 0.1363,
1320
+ "step": 16900
1321
+ },
1322
+ {
1323
+ "epoch": 3.06,
1324
+ "grad_norm": 1.9803905487060547,
1325
+ "learning_rate": 7.0018181818181815e-06,
1326
+ "loss": 0.1317,
1327
+ "step": 17000
1328
+ },
1329
+ {
1330
+ "epoch": 3.06,
1331
+ "eval_loss": 0.20176592469215393,
1332
+ "eval_runtime": 557.9491,
1333
+ "eval_samples_per_second": 109.101,
1334
+ "eval_steps_per_second": 3.411,
1335
+ "step": 17000
1336
+ },
1337
+ {
1338
+ "epoch": 3.08,
1339
+ "grad_norm": 2.0611746311187744,
1340
+ "learning_rate": 6.983636363636365e-06,
1341
+ "loss": 0.135,
1342
+ "step": 17100
1343
+ },
1344
+ {
1345
+ "epoch": 3.1,
1346
+ "grad_norm": 1.9974679946899414,
1347
+ "learning_rate": 6.965454545454546e-06,
1348
+ "loss": 0.1393,
1349
+ "step": 17200
1350
+ },
1351
+ {
1352
+ "epoch": 3.12,
1353
+ "grad_norm": 2.247957229614258,
1354
+ "learning_rate": 6.947272727272728e-06,
1355
+ "loss": 0.1377,
1356
+ "step": 17300
1357
+ },
1358
+ {
1359
+ "epoch": 3.14,
1360
+ "grad_norm": 2.3139231204986572,
1361
+ "learning_rate": 6.92909090909091e-06,
1362
+ "loss": 0.1402,
1363
+ "step": 17400
1364
+ },
1365
+ {
1366
+ "epoch": 3.15,
1367
+ "grad_norm": 2.253744602203369,
1368
+ "learning_rate": 6.910909090909092e-06,
1369
+ "loss": 0.1364,
1370
+ "step": 17500
1371
+ },
1372
+ {
1373
+ "epoch": 3.17,
1374
+ "grad_norm": 2.3517744541168213,
1375
+ "learning_rate": 6.892727272727273e-06,
1376
+ "loss": 0.137,
1377
+ "step": 17600
1378
+ },
1379
+ {
1380
+ "epoch": 3.19,
1381
+ "grad_norm": 1.9931228160858154,
1382
+ "learning_rate": 6.874545454545455e-06,
1383
+ "loss": 0.1417,
1384
+ "step": 17700
1385
+ },
1386
+ {
1387
+ "epoch": 3.21,
1388
+ "grad_norm": 1.9905173778533936,
1389
+ "learning_rate": 6.856363636363636e-06,
1390
+ "loss": 0.1337,
1391
+ "step": 17800
1392
+ },
1393
+ {
1394
+ "epoch": 3.23,
1395
+ "grad_norm": 2.7830097675323486,
1396
+ "learning_rate": 6.838181818181818e-06,
1397
+ "loss": 0.1364,
1398
+ "step": 17900
1399
+ },
1400
+ {
1401
+ "epoch": 3.24,
1402
+ "grad_norm": 2.7897889614105225,
1403
+ "learning_rate": 6.820000000000001e-06,
1404
+ "loss": 0.1361,
1405
+ "step": 18000
1406
+ },
1407
+ {
1408
+ "epoch": 3.24,
1409
+ "eval_loss": 0.20188076794147491,
1410
+ "eval_runtime": 572.624,
1411
+ "eval_samples_per_second": 106.305,
1412
+ "eval_steps_per_second": 3.323,
1413
+ "step": 18000
1414
+ },
1415
+ {
1416
+ "epoch": 3.26,
1417
+ "grad_norm": 2.2669436931610107,
1418
+ "learning_rate": 6.801818181818183e-06,
1419
+ "loss": 0.1394,
1420
+ "step": 18100
1421
+ },
1422
+ {
1423
+ "epoch": 3.28,
1424
+ "grad_norm": 1.8945894241333008,
1425
+ "learning_rate": 6.7836363636363644e-06,
1426
+ "loss": 0.1358,
1427
+ "step": 18200
1428
+ },
1429
+ {
1430
+ "epoch": 3.3,
1431
+ "grad_norm": 2.3417062759399414,
1432
+ "learning_rate": 6.765454545454546e-06,
1433
+ "loss": 0.1369,
1434
+ "step": 18300
1435
+ },
1436
+ {
1437
+ "epoch": 3.32,
1438
+ "grad_norm": 2.118128776550293,
1439
+ "learning_rate": 6.747272727272728e-06,
1440
+ "loss": 0.1324,
1441
+ "step": 18400
1442
+ },
1443
+ {
1444
+ "epoch": 3.33,
1445
+ "grad_norm": 2.1800739765167236,
1446
+ "learning_rate": 6.72909090909091e-06,
1447
+ "loss": 0.1366,
1448
+ "step": 18500
1449
+ },
1450
+ {
1451
+ "epoch": 3.35,
1452
+ "grad_norm": 2.5400383472442627,
1453
+ "learning_rate": 6.710909090909091e-06,
1454
+ "loss": 0.1364,
1455
+ "step": 18600
1456
+ },
1457
+ {
1458
+ "epoch": 3.37,
1459
+ "grad_norm": 2.051309823989868,
1460
+ "learning_rate": 6.692727272727273e-06,
1461
+ "loss": 0.1328,
1462
+ "step": 18700
1463
+ },
1464
+ {
1465
+ "epoch": 3.39,
1466
+ "grad_norm": 2.2389767169952393,
1467
+ "learning_rate": 6.674545454545455e-06,
1468
+ "loss": 0.1379,
1469
+ "step": 18800
1470
+ },
1471
+ {
1472
+ "epoch": 3.41,
1473
+ "grad_norm": 2.198885917663574,
1474
+ "learning_rate": 6.6563636363636365e-06,
1475
+ "loss": 0.1405,
1476
+ "step": 18900
1477
+ },
1478
+ {
1479
+ "epoch": 3.42,
1480
+ "grad_norm": 2.476261615753174,
1481
+ "learning_rate": 6.638181818181819e-06,
1482
+ "loss": 0.1358,
1483
+ "step": 19000
1484
+ },
1485
+ {
1486
+ "epoch": 3.42,
1487
+ "eval_loss": 0.19895973801612854,
1488
+ "eval_runtime": 554.3371,
1489
+ "eval_samples_per_second": 109.812,
1490
+ "eval_steps_per_second": 3.433,
1491
+ "step": 19000
1492
+ },
1493
+ {
1494
+ "epoch": 3.44,
1495
+ "grad_norm": 2.0383458137512207,
1496
+ "learning_rate": 6.620000000000001e-06,
1497
+ "loss": 0.1382,
1498
+ "step": 19100
1499
+ },
1500
+ {
1501
+ "epoch": 3.46,
1502
+ "grad_norm": 2.5287790298461914,
1503
+ "learning_rate": 6.601818181818183e-06,
1504
+ "loss": 0.1388,
1505
+ "step": 19200
1506
+ },
1507
+ {
1508
+ "epoch": 3.48,
1509
+ "grad_norm": 2.468118667602539,
1510
+ "learning_rate": 6.583636363636365e-06,
1511
+ "loss": 0.1387,
1512
+ "step": 19300
1513
+ },
1514
+ {
1515
+ "epoch": 3.5,
1516
+ "grad_norm": 2.280622959136963,
1517
+ "learning_rate": 6.565454545454546e-06,
1518
+ "loss": 0.1352,
1519
+ "step": 19400
1520
+ },
1521
+ {
1522
+ "epoch": 3.51,
1523
+ "grad_norm": 2.0358574390411377,
1524
+ "learning_rate": 6.5472727272727275e-06,
1525
+ "loss": 0.1374,
1526
+ "step": 19500
1527
+ },
1528
+ {
1529
+ "epoch": 3.53,
1530
+ "grad_norm": 2.251955986022949,
1531
+ "learning_rate": 6.529090909090909e-06,
1532
+ "loss": 0.1411,
1533
+ "step": 19600
1534
+ },
1535
+ {
1536
+ "epoch": 3.55,
1537
+ "grad_norm": 2.215778350830078,
1538
+ "learning_rate": 6.510909090909091e-06,
1539
+ "loss": 0.1422,
1540
+ "step": 19700
1541
+ },
1542
+ {
1543
+ "epoch": 3.57,
1544
+ "grad_norm": 2.3054020404815674,
1545
+ "learning_rate": 6.492727272727273e-06,
1546
+ "loss": 0.1352,
1547
+ "step": 19800
1548
+ },
1549
+ {
1550
+ "epoch": 3.59,
1551
+ "grad_norm": 2.643420934677124,
1552
+ "learning_rate": 6.474545454545456e-06,
1553
+ "loss": 0.1357,
1554
+ "step": 19900
1555
+ },
1556
+ {
1557
+ "epoch": 3.6,
1558
+ "grad_norm": 2.40155291557312,
1559
+ "learning_rate": 6.4563636363636375e-06,
1560
+ "loss": 0.1382,
1561
+ "step": 20000
1562
+ },
1563
+ {
1564
+ "epoch": 3.6,
1565
+ "eval_loss": 0.19792823493480682,
1566
+ "eval_runtime": 559.0516,
1567
+ "eval_samples_per_second": 108.886,
1568
+ "eval_steps_per_second": 3.404,
1569
+ "step": 20000
1570
+ },
1571
+ {
1572
+ "epoch": 3.62,
1573
+ "grad_norm": 2.9484267234802246,
1574
+ "learning_rate": 6.438181818181819e-06,
1575
+ "loss": 0.1353,
1576
+ "step": 20100
1577
+ },
1578
+ {
1579
+ "epoch": 3.64,
1580
+ "grad_norm": 2.413797616958618,
1581
+ "learning_rate": 6.42e-06,
1582
+ "loss": 0.1361,
1583
+ "step": 20200
1584
+ },
1585
+ {
1586
+ "epoch": 3.66,
1587
+ "grad_norm": 2.0721383094787598,
1588
+ "learning_rate": 6.402000000000001e-06,
1589
+ "loss": 0.1338,
1590
+ "step": 20300
1591
+ },
1592
+ {
1593
+ "epoch": 3.68,
1594
+ "grad_norm": 2.4742093086242676,
1595
+ "learning_rate": 6.384e-06,
1596
+ "loss": 0.1337,
1597
+ "step": 20400
1598
+ },
1599
+ {
1600
+ "epoch": 3.69,
1601
+ "grad_norm": 3.042827606201172,
1602
+ "learning_rate": 6.365818181818182e-06,
1603
+ "loss": 0.1385,
1604
+ "step": 20500
1605
+ },
1606
+ {
1607
+ "epoch": 3.71,
1608
+ "grad_norm": 2.4437592029571533,
1609
+ "learning_rate": 6.347636363636365e-06,
1610
+ "loss": 0.135,
1611
+ "step": 20600
1612
+ },
1613
+ {
1614
+ "epoch": 3.73,
1615
+ "grad_norm": 1.9226901531219482,
1616
+ "learning_rate": 6.3294545454545466e-06,
1617
+ "loss": 0.1322,
1618
+ "step": 20700
1619
+ },
1620
+ {
1621
+ "epoch": 3.75,
1622
+ "grad_norm": 2.4258248805999756,
1623
+ "learning_rate": 6.311272727272728e-06,
1624
+ "loss": 0.1385,
1625
+ "step": 20800
1626
+ },
1627
+ {
1628
+ "epoch": 3.77,
1629
+ "grad_norm": 2.3766396045684814,
1630
+ "learning_rate": 6.293090909090909e-06,
1631
+ "loss": 0.1331,
1632
+ "step": 20900
1633
+ },
1634
+ {
1635
+ "epoch": 3.78,
1636
+ "grad_norm": 2.920332670211792,
1637
+ "learning_rate": 6.274909090909091e-06,
1638
+ "loss": 0.1313,
1639
+ "step": 21000
1640
+ },
1641
+ {
1642
+ "epoch": 3.78,
1643
+ "eval_loss": 0.1950678676366806,
1644
+ "eval_runtime": 562.0997,
1645
+ "eval_samples_per_second": 108.296,
1646
+ "eval_steps_per_second": 3.386,
1647
+ "step": 21000
1648
+ },
1649
+ {
1650
+ "epoch": 3.8,
1651
+ "grad_norm": 2.008997917175293,
1652
+ "learning_rate": 6.256727272727273e-06,
1653
+ "loss": 0.1331,
1654
+ "step": 21100
1655
+ },
1656
+ {
1657
+ "epoch": 3.82,
1658
+ "grad_norm": 2.3787338733673096,
1659
+ "learning_rate": 6.238545454545455e-06,
1660
+ "loss": 0.1321,
1661
+ "step": 21200
1662
+ },
1663
+ {
1664
+ "epoch": 3.84,
1665
+ "grad_norm": 2.1688482761383057,
1666
+ "learning_rate": 6.220363636363637e-06,
1667
+ "loss": 0.1347,
1668
+ "step": 21300
1669
+ },
1670
+ {
1671
+ "epoch": 3.86,
1672
+ "grad_norm": 2.0800232887268066,
1673
+ "learning_rate": 6.202181818181819e-06,
1674
+ "loss": 0.135,
1675
+ "step": 21400
1676
+ },
1677
+ {
1678
+ "epoch": 3.87,
1679
+ "grad_norm": 2.2381479740142822,
1680
+ "learning_rate": 6.184e-06,
1681
+ "loss": 0.1333,
1682
+ "step": 21500
1683
+ },
1684
+ {
1685
+ "epoch": 3.89,
1686
+ "grad_norm": 2.4402754306793213,
1687
+ "learning_rate": 6.165818181818183e-06,
1688
+ "loss": 0.137,
1689
+ "step": 21600
1690
+ },
1691
+ {
1692
+ "epoch": 3.91,
1693
+ "grad_norm": 2.3774378299713135,
1694
+ "learning_rate": 6.147636363636364e-06,
1695
+ "loss": 0.137,
1696
+ "step": 21700
1697
+ },
1698
+ {
1699
+ "epoch": 3.93,
1700
+ "grad_norm": 2.5666518211364746,
1701
+ "learning_rate": 6.129454545454546e-06,
1702
+ "loss": 0.1364,
1703
+ "step": 21800
1704
+ },
1705
+ {
1706
+ "epoch": 3.95,
1707
+ "grad_norm": 2.7530879974365234,
1708
+ "learning_rate": 6.111272727272728e-06,
1709
+ "loss": 0.1329,
1710
+ "step": 21900
1711
+ },
1712
+ {
1713
+ "epoch": 3.96,
1714
+ "grad_norm": 2.2446491718292236,
1715
+ "learning_rate": 6.09309090909091e-06,
1716
+ "loss": 0.1286,
1717
+ "step": 22000
1718
+ },
1719
+ {
1720
+ "epoch": 3.96,
1721
+ "eval_loss": 0.19367100298404694,
1722
+ "eval_runtime": 556.8108,
1723
+ "eval_samples_per_second": 109.324,
1724
+ "eval_steps_per_second": 3.418,
1725
+ "step": 22000
1726
+ },
1727
+ {
1728
+ "epoch": 3.98,
1729
+ "grad_norm": 2.472163200378418,
1730
+ "learning_rate": 6.0749090909090915e-06,
1731
+ "loss": 0.135,
1732
+ "step": 22100
1733
+ },
1734
+ {
1735
+ "epoch": 4.0,
1736
+ "grad_norm": 2.458562135696411,
1737
+ "learning_rate": 6.056727272727273e-06,
1738
+ "loss": 0.1343,
1739
+ "step": 22200
1740
+ },
1741
+ {
1742
+ "epoch": 4.02,
1743
+ "grad_norm": 2.642648696899414,
1744
+ "learning_rate": 6.038545454545455e-06,
1745
+ "loss": 0.1256,
1746
+ "step": 22300
1747
+ },
1748
+ {
1749
+ "epoch": 4.04,
1750
+ "grad_norm": 1.9326255321502686,
1751
+ "learning_rate": 6.020363636363636e-06,
1752
+ "loss": 0.1178,
1753
+ "step": 22400
1754
+ },
1755
+ {
1756
+ "epoch": 4.05,
1757
+ "grad_norm": 2.303805112838745,
1758
+ "learning_rate": 6.002181818181819e-06,
1759
+ "loss": 0.1262,
1760
+ "step": 22500
1761
+ },
1762
+ {
1763
+ "epoch": 4.07,
1764
+ "grad_norm": 1.9432106018066406,
1765
+ "learning_rate": 5.984000000000001e-06,
1766
+ "loss": 0.1178,
1767
+ "step": 22600
1768
+ },
1769
+ {
1770
+ "epoch": 4.09,
1771
+ "grad_norm": 2.2067601680755615,
1772
+ "learning_rate": 5.9658181818181825e-06,
1773
+ "loss": 0.1163,
1774
+ "step": 22700
1775
+ },
1776
+ {
1777
+ "epoch": 4.11,
1778
+ "grad_norm": 1.9173979759216309,
1779
+ "learning_rate": 5.947636363636364e-06,
1780
+ "loss": 0.1182,
1781
+ "step": 22800
1782
+ },
1783
+ {
1784
+ "epoch": 4.13,
1785
+ "grad_norm": 1.9207819700241089,
1786
+ "learning_rate": 5.929454545454546e-06,
1787
+ "loss": 0.1195,
1788
+ "step": 22900
1789
+ },
1790
+ {
1791
+ "epoch": 4.14,
1792
+ "grad_norm": 2.341498851776123,
1793
+ "learning_rate": 5.911272727272728e-06,
1794
+ "loss": 0.1223,
1795
+ "step": 23000
1796
+ },
1797
+ {
1798
+ "epoch": 4.14,
1799
+ "eval_loss": 0.19433893263339996,
1800
+ "eval_runtime": 559.9176,
1801
+ "eval_samples_per_second": 108.718,
1802
+ "eval_steps_per_second": 3.399,
1803
+ "step": 23000
1804
+ },
1805
+ {
1806
+ "epoch": 4.16,
1807
+ "grad_norm": 1.9997601509094238,
1808
+ "learning_rate": 5.89309090909091e-06,
1809
+ "loss": 0.128,
1810
+ "step": 23100
1811
+ },
1812
+ {
1813
+ "epoch": 4.18,
1814
+ "grad_norm": 1.9088443517684937,
1815
+ "learning_rate": 5.874909090909091e-06,
1816
+ "loss": 0.1219,
1817
+ "step": 23200
1818
+ },
1819
+ {
1820
+ "epoch": 4.2,
1821
+ "grad_norm": 2.0948617458343506,
1822
+ "learning_rate": 5.856909090909091e-06,
1823
+ "loss": 0.1247,
1824
+ "step": 23300
1825
+ },
1826
+ {
1827
+ "epoch": 4.22,
1828
+ "grad_norm": 2.289156436920166,
1829
+ "learning_rate": 5.838727272727274e-06,
1830
+ "loss": 0.1207,
1831
+ "step": 23400
1832
+ },
1833
+ {
1834
+ "epoch": 4.23,
1835
+ "grad_norm": 3.1126627922058105,
1836
+ "learning_rate": 5.820545454545456e-06,
1837
+ "loss": 0.1211,
1838
+ "step": 23500
1839
+ },
1840
+ {
1841
+ "epoch": 4.25,
1842
+ "grad_norm": 2.562894582748413,
1843
+ "learning_rate": 5.802363636363637e-06,
1844
+ "loss": 0.1216,
1845
+ "step": 23600
1846
+ },
1847
+ {
1848
+ "epoch": 4.27,
1849
+ "grad_norm": 2.3009512424468994,
1850
+ "learning_rate": 5.7841818181818185e-06,
1851
+ "loss": 0.1228,
1852
+ "step": 23700
1853
+ },
1854
+ {
1855
+ "epoch": 4.29,
1856
+ "grad_norm": 2.5779926776885986,
1857
+ "learning_rate": 5.766e-06,
1858
+ "loss": 0.1214,
1859
+ "step": 23800
1860
+ },
1861
+ {
1862
+ "epoch": 4.31,
1863
+ "grad_norm": 2.466285228729248,
1864
+ "learning_rate": 5.747818181818182e-06,
1865
+ "loss": 0.1206,
1866
+ "step": 23900
1867
+ },
1868
+ {
1869
+ "epoch": 4.32,
1870
+ "grad_norm": 2.0055696964263916,
1871
+ "learning_rate": 5.729636363636364e-06,
1872
+ "loss": 0.1256,
1873
+ "step": 24000
1874
+ },
1875
+ {
1876
+ "epoch": 4.32,
1877
+ "eval_loss": 0.1951528936624527,
1878
+ "eval_runtime": 566.0319,
1879
+ "eval_samples_per_second": 107.543,
1880
+ "eval_steps_per_second": 3.362,
1881
+ "step": 24000
1882
+ },
1883
+ {
1884
+ "epoch": 4.34,
1885
+ "grad_norm": 1.853898525238037,
1886
+ "learning_rate": 5.711454545454546e-06,
1887
+ "loss": 0.121,
1888
+ "step": 24100
1889
+ },
1890
+ {
1891
+ "epoch": 4.36,
1892
+ "grad_norm": 2.03159499168396,
1893
+ "learning_rate": 5.693272727272727e-06,
1894
+ "loss": 0.1252,
1895
+ "step": 24200
1896
+ },
1897
+ {
1898
+ "epoch": 4.38,
1899
+ "grad_norm": 2.1461386680603027,
1900
+ "learning_rate": 5.67509090909091e-06,
1901
+ "loss": 0.1165,
1902
+ "step": 24300
1903
+ },
1904
+ {
1905
+ "epoch": 4.4,
1906
+ "grad_norm": 2.047924041748047,
1907
+ "learning_rate": 5.656909090909091e-06,
1908
+ "loss": 0.127,
1909
+ "step": 24400
1910
+ },
1911
+ {
1912
+ "epoch": 4.41,
1913
+ "grad_norm": 2.1523540019989014,
1914
+ "learning_rate": 5.638727272727273e-06,
1915
+ "loss": 0.1201,
1916
+ "step": 24500
1917
+ },
1918
+ {
1919
+ "epoch": 4.43,
1920
+ "grad_norm": 1.8761101961135864,
1921
+ "learning_rate": 5.620545454545455e-06,
1922
+ "loss": 0.1204,
1923
+ "step": 24600
1924
+ },
1925
+ {
1926
+ "epoch": 4.45,
1927
+ "grad_norm": 1.9362976551055908,
1928
+ "learning_rate": 5.602363636363637e-06,
1929
+ "loss": 0.1163,
1930
+ "step": 24700
1931
+ },
1932
+ {
1933
+ "epoch": 4.47,
1934
+ "grad_norm": 2.4700512886047363,
1935
+ "learning_rate": 5.584181818181819e-06,
1936
+ "loss": 0.1177,
1937
+ "step": 24800
1938
+ },
1939
+ {
1940
+ "epoch": 4.49,
1941
+ "grad_norm": 2.274169683456421,
1942
+ "learning_rate": 5.566000000000001e-06,
1943
+ "loss": 0.125,
1944
+ "step": 24900
1945
+ },
1946
+ {
1947
+ "epoch": 4.5,
1948
+ "grad_norm": 2.2185416221618652,
1949
+ "learning_rate": 5.5478181818181816e-06,
1950
+ "loss": 0.1216,
1951
+ "step": 25000
1952
+ },
1953
+ {
1954
+ "epoch": 4.5,
1955
+ "eval_loss": 0.19246701896190643,
1956
+ "eval_runtime": 562.9559,
1957
+ "eval_samples_per_second": 108.131,
1958
+ "eval_steps_per_second": 3.38,
1959
+ "step": 25000
1960
+ },
1961
+ {
1962
+ "epoch": 4.52,
1963
+ "grad_norm": 1.9934074878692627,
1964
+ "learning_rate": 5.529636363636363e-06,
1965
+ "loss": 0.1204,
1966
+ "step": 25100
1967
+ },
1968
+ {
1969
+ "epoch": 4.54,
1970
+ "grad_norm": 1.9352362155914307,
1971
+ "learning_rate": 5.511454545454545e-06,
1972
+ "loss": 0.1221,
1973
+ "step": 25200
1974
+ },
1975
+ {
1976
+ "epoch": 4.56,
1977
+ "grad_norm": 2.508136034011841,
1978
+ "learning_rate": 5.493272727272728e-06,
1979
+ "loss": 0.1243,
1980
+ "step": 25300
1981
+ },
1982
+ {
1983
+ "epoch": 4.58,
1984
+ "grad_norm": 1.9421477317810059,
1985
+ "learning_rate": 5.47509090909091e-06,
1986
+ "loss": 0.1257,
1987
+ "step": 25400
1988
+ },
1989
+ {
1990
+ "epoch": 4.59,
1991
+ "grad_norm": 2.0170023441314697,
1992
+ "learning_rate": 5.456909090909092e-06,
1993
+ "loss": 0.1213,
1994
+ "step": 25500
1995
+ },
1996
+ {
1997
+ "epoch": 4.61,
1998
+ "grad_norm": 2.4295244216918945,
1999
+ "learning_rate": 5.438909090909091e-06,
2000
+ "loss": 0.1285,
2001
+ "step": 25600
2002
+ },
2003
+ {
2004
+ "epoch": 4.63,
2005
+ "grad_norm": 2.2002458572387695,
2006
+ "learning_rate": 5.420727272727273e-06,
2007
+ "loss": 0.1216,
2008
+ "step": 25700
2009
+ },
2010
+ {
2011
+ "epoch": 4.65,
2012
+ "grad_norm": 2.3806753158569336,
2013
+ "learning_rate": 5.402545454545455e-06,
2014
+ "loss": 0.1283,
2015
+ "step": 25800
2016
+ },
2017
+ {
2018
+ "epoch": 4.67,
2019
+ "grad_norm": 2.4009785652160645,
2020
+ "learning_rate": 5.384363636363637e-06,
2021
+ "loss": 0.1172,
2022
+ "step": 25900
2023
+ },
2024
+ {
2025
+ "epoch": 4.68,
2026
+ "grad_norm": 1.9371693134307861,
2027
+ "learning_rate": 5.3661818181818185e-06,
2028
+ "loss": 0.1204,
2029
+ "step": 26000
2030
+ },
2031
+ {
2032
+ "epoch": 4.68,
2033
+ "eval_loss": 0.19222472608089447,
2034
+ "eval_runtime": 560.4905,
2035
+ "eval_samples_per_second": 108.607,
2036
+ "eval_steps_per_second": 3.395,
2037
+ "step": 26000
2038
+ },
2039
+ {
2040
+ "epoch": 4.7,
2041
+ "grad_norm": 2.303044557571411,
2042
+ "learning_rate": 5.348000000000001e-06,
2043
+ "loss": 0.1242,
2044
+ "step": 26100
2045
+ },
2046
+ {
2047
+ "epoch": 4.72,
2048
+ "grad_norm": 2.349740743637085,
2049
+ "learning_rate": 5.329818181818183e-06,
2050
+ "loss": 0.1253,
2051
+ "step": 26200
2052
+ },
2053
+ {
2054
+ "epoch": 4.74,
2055
+ "grad_norm": 2.497861623764038,
2056
+ "learning_rate": 5.311636363636364e-06,
2057
+ "loss": 0.1231,
2058
+ "step": 26300
2059
+ },
2060
+ {
2061
+ "epoch": 4.76,
2062
+ "grad_norm": 2.0001113414764404,
2063
+ "learning_rate": 5.293454545454546e-06,
2064
+ "loss": 0.1237,
2065
+ "step": 26400
2066
+ },
2067
+ {
2068
+ "epoch": 4.77,
2069
+ "grad_norm": 2.0054848194122314,
2070
+ "learning_rate": 5.275272727272728e-06,
2071
+ "loss": 0.1169,
2072
+ "step": 26500
2073
+ },
2074
+ {
2075
+ "epoch": 4.79,
2076
+ "grad_norm": 2.3605902194976807,
2077
+ "learning_rate": 5.2570909090909095e-06,
2078
+ "loss": 0.1229,
2079
+ "step": 26600
2080
+ },
2081
+ {
2082
+ "epoch": 4.81,
2083
+ "grad_norm": 2.1442110538482666,
2084
+ "learning_rate": 5.238909090909091e-06,
2085
+ "loss": 0.1186,
2086
+ "step": 26700
2087
+ },
2088
+ {
2089
+ "epoch": 4.83,
2090
+ "grad_norm": 1.9753350019454956,
2091
+ "learning_rate": 5.220727272727273e-06,
2092
+ "loss": 0.1207,
2093
+ "step": 26800
2094
+ },
2095
+ {
2096
+ "epoch": 4.85,
2097
+ "grad_norm": 2.507814884185791,
2098
+ "learning_rate": 5.202545454545454e-06,
2099
+ "loss": 0.1239,
2100
+ "step": 26900
2101
+ },
2102
+ {
2103
+ "epoch": 4.86,
2104
+ "grad_norm": 2.083677291870117,
2105
+ "learning_rate": 5.184363636363636e-06,
2106
+ "loss": 0.125,
2107
+ "step": 27000
2108
+ },
2109
+ {
2110
+ "epoch": 4.86,
2111
+ "eval_loss": 0.18878485262393951,
2112
+ "eval_runtime": 570.8746,
2113
+ "eval_samples_per_second": 106.631,
2114
+ "eval_steps_per_second": 3.333,
2115
+ "step": 27000
2116
+ },
2117
+ {
2118
+ "epoch": 4.88,
2119
+ "grad_norm": 2.1022801399230957,
2120
+ "learning_rate": 5.166181818181819e-06,
2121
+ "loss": 0.1175,
2122
+ "step": 27100
2123
+ },
2124
+ {
2125
+ "epoch": 4.9,
2126
+ "grad_norm": 2.282572031021118,
2127
+ "learning_rate": 5.1480000000000005e-06,
2128
+ "loss": 0.1209,
2129
+ "step": 27200
2130
+ },
2131
+ {
2132
+ "epoch": 4.92,
2133
+ "grad_norm": 2.1377222537994385,
2134
+ "learning_rate": 5.129818181818182e-06,
2135
+ "loss": 0.12,
2136
+ "step": 27300
2137
+ },
2138
+ {
2139
+ "epoch": 4.94,
2140
+ "grad_norm": 2.226515769958496,
2141
+ "learning_rate": 5.111636363636364e-06,
2142
+ "loss": 0.1183,
2143
+ "step": 27400
2144
+ },
2145
+ {
2146
+ "epoch": 4.95,
2147
+ "grad_norm": 1.9465168714523315,
2148
+ "learning_rate": 5.093454545454546e-06,
2149
+ "loss": 0.1197,
2150
+ "step": 27500
2151
+ },
2152
+ {
2153
+ "epoch": 4.97,
2154
+ "grad_norm": 2.625356912612915,
2155
+ "learning_rate": 5.075272727272728e-06,
2156
+ "loss": 0.1202,
2157
+ "step": 27600
2158
+ },
2159
+ {
2160
+ "epoch": 4.99,
2161
+ "grad_norm": 2.350402355194092,
2162
+ "learning_rate": 5.057090909090909e-06,
2163
+ "loss": 0.12,
2164
+ "step": 27700
2165
+ },
2166
+ {
2167
+ "epoch": 5.01,
2168
+ "grad_norm": 2.0888664722442627,
2169
+ "learning_rate": 5.038909090909091e-06,
2170
+ "loss": 0.111,
2171
+ "step": 27800
2172
+ },
2173
+ {
2174
+ "epoch": 5.03,
2175
+ "grad_norm": 2.536491632461548,
2176
+ "learning_rate": 5.0207272727272725e-06,
2177
+ "loss": 0.108,
2178
+ "step": 27900
2179
+ },
2180
+ {
2181
+ "epoch": 5.05,
2182
+ "grad_norm": 1.663758397102356,
2183
+ "learning_rate": 5.002545454545455e-06,
2184
+ "loss": 0.1093,
2185
+ "step": 28000
2186
+ },
2187
+ {
2188
+ "epoch": 5.05,
2189
+ "eval_loss": 0.1907467395067215,
2190
+ "eval_runtime": 558.5701,
2191
+ "eval_samples_per_second": 108.98,
2192
+ "eval_steps_per_second": 3.407,
2193
+ "step": 28000
2194
+ },
2195
+ {
2196
+ "epoch": 5.06,
2197
+ "grad_norm": 2.2518911361694336,
2198
+ "learning_rate": 4.984363636363636e-06,
2199
+ "loss": 0.1093,
2200
+ "step": 28100
2201
+ },
2202
+ {
2203
+ "epoch": 5.08,
2204
+ "grad_norm": 1.9912610054016113,
2205
+ "learning_rate": 4.966181818181818e-06,
2206
+ "loss": 0.1102,
2207
+ "step": 28200
2208
+ },
2209
+ {
2210
+ "epoch": 5.1,
2211
+ "grad_norm": 2.848151922225952,
2212
+ "learning_rate": 4.948000000000001e-06,
2213
+ "loss": 0.114,
2214
+ "step": 28300
2215
+ },
2216
+ {
2217
+ "epoch": 5.12,
2218
+ "grad_norm": 2.5407612323760986,
2219
+ "learning_rate": 4.9298181818181826e-06,
2220
+ "loss": 0.1157,
2221
+ "step": 28400
2222
+ },
2223
+ {
2224
+ "epoch": 5.14,
2225
+ "grad_norm": 1.966389536857605,
2226
+ "learning_rate": 4.9116363636363636e-06,
2227
+ "loss": 0.1108,
2228
+ "step": 28500
2229
+ },
2230
+ {
2231
+ "epoch": 5.15,
2232
+ "grad_norm": 2.2150988578796387,
2233
+ "learning_rate": 4.893454545454545e-06,
2234
+ "loss": 0.1097,
2235
+ "step": 28600
2236
+ },
2237
+ {
2238
+ "epoch": 5.17,
2239
+ "grad_norm": 2.6871962547302246,
2240
+ "learning_rate": 4.875272727272728e-06,
2241
+ "loss": 0.1078,
2242
+ "step": 28700
2243
+ },
2244
+ {
2245
+ "epoch": 5.19,
2246
+ "grad_norm": 2.092545986175537,
2247
+ "learning_rate": 4.85709090909091e-06,
2248
+ "loss": 0.1139,
2249
+ "step": 28800
2250
+ },
2251
+ {
2252
+ "epoch": 5.21,
2253
+ "grad_norm": 2.1619746685028076,
2254
+ "learning_rate": 4.838909090909091e-06,
2255
+ "loss": 0.1137,
2256
+ "step": 28900
2257
+ },
2258
+ {
2259
+ "epoch": 5.23,
2260
+ "grad_norm": 2.059086322784424,
2261
+ "learning_rate": 4.820727272727273e-06,
2262
+ "loss": 0.1092,
2263
+ "step": 29000
2264
+ },
2265
+ {
2266
+ "epoch": 5.23,
2267
+ "eval_loss": 0.19213946163654327,
2268
+ "eval_runtime": 559.5411,
2269
+ "eval_samples_per_second": 108.791,
2270
+ "eval_steps_per_second": 3.401,
2271
+ "step": 29000
2272
+ },
2273
+ {
2274
+ "epoch": 5.24,
2275
+ "grad_norm": 2.200467109680176,
2276
+ "learning_rate": 4.802545454545455e-06,
2277
+ "loss": 0.1166,
2278
+ "step": 29100
2279
+ },
2280
+ {
2281
+ "epoch": 5.26,
2282
+ "grad_norm": 2.1066653728485107,
2283
+ "learning_rate": 4.784363636363637e-06,
2284
+ "loss": 0.1116,
2285
+ "step": 29200
2286
+ },
2287
+ {
2288
+ "epoch": 5.28,
2289
+ "grad_norm": 1.9387317895889282,
2290
+ "learning_rate": 4.766181818181818e-06,
2291
+ "loss": 0.1158,
2292
+ "step": 29300
2293
+ },
2294
+ {
2295
+ "epoch": 5.3,
2296
+ "grad_norm": 2.8568620681762695,
2297
+ "learning_rate": 4.748e-06,
2298
+ "loss": 0.1101,
2299
+ "step": 29400
2300
+ },
2301
+ {
2302
+ "epoch": 5.32,
2303
+ "grad_norm": 2.667982816696167,
2304
+ "learning_rate": 4.729818181818182e-06,
2305
+ "loss": 0.1154,
2306
+ "step": 29500
2307
+ },
2308
+ {
2309
+ "epoch": 5.33,
2310
+ "grad_norm": 1.8243011236190796,
2311
+ "learning_rate": 4.711636363636364e-06,
2312
+ "loss": 0.1115,
2313
+ "step": 29600
2314
+ },
2315
+ {
2316
+ "epoch": 5.35,
2317
+ "grad_norm": 2.2636425495147705,
2318
+ "learning_rate": 4.693636363636364e-06,
2319
+ "loss": 0.1107,
2320
+ "step": 29700
2321
+ },
2322
+ {
2323
+ "epoch": 5.37,
2324
+ "grad_norm": 2.183295488357544,
2325
+ "learning_rate": 4.675454545454546e-06,
2326
+ "loss": 0.1097,
2327
+ "step": 29800
2328
+ },
2329
+ {
2330
+ "epoch": 5.39,
2331
+ "grad_norm": 1.9221436977386475,
2332
+ "learning_rate": 4.657272727272728e-06,
2333
+ "loss": 0.1102,
2334
+ "step": 29900
2335
+ },
2336
+ {
2337
+ "epoch": 5.41,
2338
+ "grad_norm": 2.4164745807647705,
2339
+ "learning_rate": 4.639090909090909e-06,
2340
+ "loss": 0.1113,
2341
+ "step": 30000
2342
+ },
2343
+ {
2344
+ "epoch": 5.41,
2345
+ "eval_loss": 0.1900329291820526,
2346
+ "eval_runtime": 555.8487,
2347
+ "eval_samples_per_second": 109.514,
2348
+ "eval_steps_per_second": 3.424,
2349
+ "step": 30000
2350
+ },
2351
+ {
2352
+ "epoch": 5.42,
2353
+ "grad_norm": 1.9355889558792114,
2354
+ "learning_rate": 4.6209090909090915e-06,
2355
+ "loss": 0.1093,
2356
+ "step": 30100
2357
+ },
2358
+ {
2359
+ "epoch": 5.44,
2360
+ "grad_norm": 2.172149419784546,
2361
+ "learning_rate": 4.602727272727273e-06,
2362
+ "loss": 0.1091,
2363
+ "step": 30200
2364
+ },
2365
+ {
2366
+ "epoch": 5.46,
2367
+ "grad_norm": 2.230680465698242,
2368
+ "learning_rate": 4.584545454545455e-06,
2369
+ "loss": 0.1068,
2370
+ "step": 30300
2371
+ },
2372
+ {
2373
+ "epoch": 5.48,
2374
+ "grad_norm": 2.4593875408172607,
2375
+ "learning_rate": 4.566363636363636e-06,
2376
+ "loss": 0.1076,
2377
+ "step": 30400
2378
+ },
2379
+ {
2380
+ "epoch": 5.5,
2381
+ "grad_norm": 2.358771324157715,
2382
+ "learning_rate": 4.548181818181819e-06,
2383
+ "loss": 0.1104,
2384
+ "step": 30500
2385
+ },
2386
+ {
2387
+ "epoch": 5.51,
2388
+ "grad_norm": 2.006244421005249,
2389
+ "learning_rate": 4.530000000000001e-06,
2390
+ "loss": 0.1081,
2391
+ "step": 30600
2392
+ },
2393
+ {
2394
+ "epoch": 5.53,
2395
+ "grad_norm": 2.121628999710083,
2396
+ "learning_rate": 4.5118181818181825e-06,
2397
+ "loss": 0.1152,
2398
+ "step": 30700
2399
+ },
2400
+ {
2401
+ "epoch": 5.55,
2402
+ "grad_norm": 2.2535011768341064,
2403
+ "learning_rate": 4.4936363636363635e-06,
2404
+ "loss": 0.113,
2405
+ "step": 30800
2406
+ },
2407
+ {
2408
+ "epoch": 5.57,
2409
+ "grad_norm": 2.598020553588867,
2410
+ "learning_rate": 4.475454545454545e-06,
2411
+ "loss": 0.1082,
2412
+ "step": 30900
2413
+ },
2414
+ {
2415
+ "epoch": 5.59,
2416
+ "grad_norm": 2.041231155395508,
2417
+ "learning_rate": 4.457272727272728e-06,
2418
+ "loss": 0.1128,
2419
+ "step": 31000
2420
+ },
2421
+ {
2422
+ "epoch": 5.59,
2423
+ "eval_loss": 0.1888962835073471,
2424
+ "eval_runtime": 561.6459,
2425
+ "eval_samples_per_second": 108.383,
2426
+ "eval_steps_per_second": 3.388,
2427
+ "step": 31000
2428
+ },
2429
+ {
2430
+ "epoch": 5.6,
2431
+ "grad_norm": 1.9678025245666504,
2432
+ "learning_rate": 4.43909090909091e-06,
2433
+ "loss": 0.1049,
2434
+ "step": 31100
2435
+ },
2436
+ {
2437
+ "epoch": 5.62,
2438
+ "grad_norm": 2.5535237789154053,
2439
+ "learning_rate": 4.420909090909091e-06,
2440
+ "loss": 0.1124,
2441
+ "step": 31200
2442
+ },
2443
+ {
2444
+ "epoch": 5.64,
2445
+ "grad_norm": 2.313497304916382,
2446
+ "learning_rate": 4.402727272727273e-06,
2447
+ "loss": 0.111,
2448
+ "step": 31300
2449
+ },
2450
+ {
2451
+ "epoch": 5.66,
2452
+ "grad_norm": 2.2810420989990234,
2453
+ "learning_rate": 4.3845454545454545e-06,
2454
+ "loss": 0.1078,
2455
+ "step": 31400
2456
+ },
2457
+ {
2458
+ "epoch": 5.68,
2459
+ "grad_norm": 1.816409945487976,
2460
+ "learning_rate": 4.366363636363637e-06,
2461
+ "loss": 0.1091,
2462
+ "step": 31500
2463
+ },
2464
+ {
2465
+ "epoch": 5.69,
2466
+ "grad_norm": 2.5376205444335938,
2467
+ "learning_rate": 4.348181818181818e-06,
2468
+ "loss": 0.1124,
2469
+ "step": 31600
2470
+ },
2471
+ {
2472
+ "epoch": 5.71,
2473
+ "grad_norm": 1.8754093647003174,
2474
+ "learning_rate": 4.33e-06,
2475
+ "loss": 0.108,
2476
+ "step": 31700
2477
+ },
2478
+ {
2479
+ "epoch": 5.73,
2480
+ "grad_norm": 2.0413951873779297,
2481
+ "learning_rate": 4.311818181818182e-06,
2482
+ "loss": 0.1051,
2483
+ "step": 31800
2484
+ },
2485
+ {
2486
+ "epoch": 5.75,
2487
+ "grad_norm": 1.9775103330612183,
2488
+ "learning_rate": 4.293636363636364e-06,
2489
+ "loss": 0.1101,
2490
+ "step": 31900
2491
+ },
2492
+ {
2493
+ "epoch": 5.77,
2494
+ "grad_norm": 2.9266469478607178,
2495
+ "learning_rate": 4.2754545454545456e-06,
2496
+ "loss": 0.1101,
2497
+ "step": 32000
2498
+ },
2499
+ {
2500
+ "epoch": 5.77,
2501
+ "eval_loss": 0.1881016492843628,
2502
+ "eval_runtime": 557.9801,
2503
+ "eval_samples_per_second": 109.095,
2504
+ "eval_steps_per_second": 3.411,
2505
+ "step": 32000
2506
+ },
2507
+ {
2508
+ "epoch": 5.78,
2509
+ "grad_norm": 2.3823201656341553,
2510
+ "learning_rate": 4.257272727272727e-06,
2511
+ "loss": 0.1103,
2512
+ "step": 32100
2513
+ },
2514
+ {
2515
+ "epoch": 5.8,
2516
+ "grad_norm": 2.3224339485168457,
2517
+ "learning_rate": 4.239090909090909e-06,
2518
+ "loss": 0.1117,
2519
+ "step": 32200
2520
+ },
2521
+ {
2522
+ "epoch": 5.82,
2523
+ "grad_norm": 2.2235219478607178,
2524
+ "learning_rate": 4.220909090909091e-06,
2525
+ "loss": 0.1093,
2526
+ "step": 32300
2527
+ },
2528
+ {
2529
+ "epoch": 5.84,
2530
+ "grad_norm": 2.0410640239715576,
2531
+ "learning_rate": 4.202727272727273e-06,
2532
+ "loss": 0.1099,
2533
+ "step": 32400
2534
+ },
2535
+ {
2536
+ "epoch": 5.86,
2537
+ "grad_norm": 2.325864553451538,
2538
+ "learning_rate": 4.184545454545455e-06,
2539
+ "loss": 0.1075,
2540
+ "step": 32500
2541
+ },
2542
+ {
2543
+ "epoch": 5.87,
2544
+ "grad_norm": 1.8241126537322998,
2545
+ "learning_rate": 4.166545454545455e-06,
2546
+ "loss": 0.1082,
2547
+ "step": 32600
2548
+ },
2549
+ {
2550
+ "epoch": 5.89,
2551
+ "grad_norm": 1.5952904224395752,
2552
+ "learning_rate": 4.148363636363636e-06,
2553
+ "loss": 0.1108,
2554
+ "step": 32700
2555
+ },
2556
+ {
2557
+ "epoch": 5.91,
2558
+ "grad_norm": 2.1724677085876465,
2559
+ "learning_rate": 4.130181818181819e-06,
2560
+ "loss": 0.1094,
2561
+ "step": 32800
2562
+ },
2563
+ {
2564
+ "epoch": 5.93,
2565
+ "grad_norm": 2.2764976024627686,
2566
+ "learning_rate": 4.112000000000001e-06,
2567
+ "loss": 0.1044,
2568
+ "step": 32900
2569
+ },
2570
+ {
2571
+ "epoch": 5.95,
2572
+ "grad_norm": 2.123507261276245,
2573
+ "learning_rate": 4.0938181818181824e-06,
2574
+ "loss": 0.1083,
2575
+ "step": 33000
2576
+ },
2577
+ {
2578
+ "epoch": 5.95,
2579
+ "eval_loss": 0.18839485943317413,
2580
+ "eval_runtime": 556.9779,
2581
+ "eval_samples_per_second": 109.292,
2582
+ "eval_steps_per_second": 3.417,
2583
+ "step": 33000
2584
+ },
2585
+ {
2586
+ "epoch": 5.96,
2587
+ "grad_norm": 2.797337532043457,
2588
+ "learning_rate": 4.0756363636363634e-06,
2589
+ "loss": 0.1153,
2590
+ "step": 33100
2591
+ },
2592
+ {
2593
+ "epoch": 5.98,
2594
+ "grad_norm": 2.4354584217071533,
2595
+ "learning_rate": 4.057454545454545e-06,
2596
+ "loss": 0.1112,
2597
+ "step": 33200
2598
+ },
2599
+ {
2600
+ "epoch": 6.0,
2601
+ "grad_norm": 2.2812533378601074,
2602
+ "learning_rate": 4.039272727272728e-06,
2603
+ "loss": 0.1068,
2604
+ "step": 33300
2605
+ },
2606
+ {
2607
+ "epoch": 6.02,
2608
+ "grad_norm": 1.898974895477295,
2609
+ "learning_rate": 4.02109090909091e-06,
2610
+ "loss": 0.099,
2611
+ "step": 33400
2612
+ },
2613
+ {
2614
+ "epoch": 6.04,
2615
+ "grad_norm": 2.096282482147217,
2616
+ "learning_rate": 4.002909090909091e-06,
2617
+ "loss": 0.1033,
2618
+ "step": 33500
2619
+ },
2620
+ {
2621
+ "epoch": 6.05,
2622
+ "grad_norm": 2.209646224975586,
2623
+ "learning_rate": 3.984727272727273e-06,
2624
+ "loss": 0.1001,
2625
+ "step": 33600
2626
+ },
2627
+ {
2628
+ "epoch": 6.07,
2629
+ "grad_norm": 2.0665271282196045,
2630
+ "learning_rate": 3.966545454545455e-06,
2631
+ "loss": 0.1007,
2632
+ "step": 33700
2633
+ },
2634
+ {
2635
+ "epoch": 6.09,
2636
+ "grad_norm": 2.2653417587280273,
2637
+ "learning_rate": 3.948363636363637e-06,
2638
+ "loss": 0.0975,
2639
+ "step": 33800
2640
+ },
2641
+ {
2642
+ "epoch": 6.11,
2643
+ "grad_norm": 1.7271119356155396,
2644
+ "learning_rate": 3.930181818181818e-06,
2645
+ "loss": 0.1063,
2646
+ "step": 33900
2647
+ },
2648
+ {
2649
+ "epoch": 6.13,
2650
+ "grad_norm": 1.858734369277954,
2651
+ "learning_rate": 3.912e-06,
2652
+ "loss": 0.0983,
2653
+ "step": 34000
2654
+ },
2655
+ {
2656
+ "epoch": 6.13,
2657
+ "eval_loss": 0.18823260068893433,
2658
+ "eval_runtime": 563.2389,
2659
+ "eval_samples_per_second": 108.077,
2660
+ "eval_steps_per_second": 3.379,
2661
+ "step": 34000
2662
+ },
2663
+ {
2664
+ "epoch": 6.14,
2665
+ "grad_norm": 2.122073173522949,
2666
+ "learning_rate": 3.893818181818182e-06,
2667
+ "loss": 0.1017,
2668
+ "step": 34100
2669
+ },
2670
+ {
2671
+ "epoch": 6.16,
2672
+ "grad_norm": 1.7919000387191772,
2673
+ "learning_rate": 3.8756363636363645e-06,
2674
+ "loss": 0.1018,
2675
+ "step": 34200
2676
+ },
2677
+ {
2678
+ "epoch": 6.18,
2679
+ "grad_norm": 1.944100022315979,
2680
+ "learning_rate": 3.8574545454545455e-06,
2681
+ "loss": 0.1019,
2682
+ "step": 34300
2683
+ },
2684
+ {
2685
+ "epoch": 6.2,
2686
+ "grad_norm": 2.422239065170288,
2687
+ "learning_rate": 3.839272727272727e-06,
2688
+ "loss": 0.1011,
2689
+ "step": 34400
2690
+ },
2691
+ {
2692
+ "epoch": 6.22,
2693
+ "grad_norm": 2.4203903675079346,
2694
+ "learning_rate": 3.821090909090909e-06,
2695
+ "loss": 0.1015,
2696
+ "step": 34500
2697
+ },
2698
+ {
2699
+ "epoch": 6.23,
2700
+ "grad_norm": 2.3504583835601807,
2701
+ "learning_rate": 3.802909090909091e-06,
2702
+ "loss": 0.1019,
2703
+ "step": 34600
2704
+ },
2705
+ {
2706
+ "epoch": 6.25,
2707
+ "grad_norm": 2.062124729156494,
2708
+ "learning_rate": 3.7849090909090914e-06,
2709
+ "loss": 0.1048,
2710
+ "step": 34700
2711
+ },
2712
+ {
2713
+ "epoch": 6.27,
2714
+ "grad_norm": 2.1046996116638184,
2715
+ "learning_rate": 3.766727272727273e-06,
2716
+ "loss": 0.1012,
2717
+ "step": 34800
2718
+ },
2719
+ {
2720
+ "epoch": 6.29,
2721
+ "grad_norm": 2.111078977584839,
2722
+ "learning_rate": 3.7485454545454546e-06,
2723
+ "loss": 0.1031,
2724
+ "step": 34900
2725
+ },
2726
+ {
2727
+ "epoch": 6.31,
2728
+ "grad_norm": 1.7998141050338745,
2729
+ "learning_rate": 3.7303636363636364e-06,
2730
+ "loss": 0.1041,
2731
+ "step": 35000
2732
+ },
2733
+ {
2734
+ "epoch": 6.31,
2735
+ "eval_loss": 0.1882905513048172,
2736
+ "eval_runtime": 557.868,
2737
+ "eval_samples_per_second": 109.117,
2738
+ "eval_steps_per_second": 3.411,
2739
+ "step": 35000
2740
+ },
2741
+ {
2742
+ "epoch": 6.32,
2743
+ "grad_norm": 2.569345712661743,
2744
+ "learning_rate": 3.7121818181818187e-06,
2745
+ "loss": 0.1015,
2746
+ "step": 35100
2747
+ },
2748
+ {
2749
+ "epoch": 6.34,
2750
+ "grad_norm": 2.156580686569214,
2751
+ "learning_rate": 3.6940000000000005e-06,
2752
+ "loss": 0.1007,
2753
+ "step": 35200
2754
+ },
2755
+ {
2756
+ "epoch": 6.36,
2757
+ "grad_norm": 2.1598432064056396,
2758
+ "learning_rate": 3.675818181818182e-06,
2759
+ "loss": 0.1035,
2760
+ "step": 35300
2761
+ },
2762
+ {
2763
+ "epoch": 6.38,
2764
+ "grad_norm": 2.94124698638916,
2765
+ "learning_rate": 3.657636363636364e-06,
2766
+ "loss": 0.1028,
2767
+ "step": 35400
2768
+ },
2769
+ {
2770
+ "epoch": 6.4,
2771
+ "grad_norm": 2.118029832839966,
2772
+ "learning_rate": 3.639454545454546e-06,
2773
+ "loss": 0.1027,
2774
+ "step": 35500
2775
+ },
2776
+ {
2777
+ "epoch": 6.41,
2778
+ "grad_norm": 2.3655309677124023,
2779
+ "learning_rate": 3.621272727272728e-06,
2780
+ "loss": 0.1013,
2781
+ "step": 35600
2782
+ },
2783
+ {
2784
+ "epoch": 6.43,
2785
+ "grad_norm": 2.1393494606018066,
2786
+ "learning_rate": 3.6030909090909093e-06,
2787
+ "loss": 0.1007,
2788
+ "step": 35700
2789
+ },
2790
+ {
2791
+ "epoch": 6.45,
2792
+ "grad_norm": 2.1543033123016357,
2793
+ "learning_rate": 3.584909090909091e-06,
2794
+ "loss": 0.1048,
2795
+ "step": 35800
2796
+ },
2797
+ {
2798
+ "epoch": 6.47,
2799
+ "grad_norm": 2.0389814376831055,
2800
+ "learning_rate": 3.566727272727273e-06,
2801
+ "loss": 0.1027,
2802
+ "step": 35900
2803
+ },
2804
+ {
2805
+ "epoch": 6.49,
2806
+ "grad_norm": 1.9840948581695557,
2807
+ "learning_rate": 3.5485454545454553e-06,
2808
+ "loss": 0.0997,
2809
+ "step": 36000
2810
+ },
2811
+ {
2812
+ "epoch": 6.49,
2813
+ "eval_loss": 0.18674355745315552,
2814
+ "eval_runtime": 559.1771,
2815
+ "eval_samples_per_second": 108.862,
2816
+ "eval_steps_per_second": 3.403,
2817
+ "step": 36000
2818
+ },
2819
+ {
2820
+ "epoch": 6.5,
2821
+ "grad_norm": 2.0327794551849365,
2822
+ "learning_rate": 3.5303636363636367e-06,
2823
+ "loss": 0.1082,
2824
+ "step": 36100
2825
+ },
2826
+ {
2827
+ "epoch": 6.52,
2828
+ "grad_norm": 1.9940663576126099,
2829
+ "learning_rate": 3.5121818181818185e-06,
2830
+ "loss": 0.1008,
2831
+ "step": 36200
2832
+ },
2833
+ {
2834
+ "epoch": 6.54,
2835
+ "grad_norm": 2.4469642639160156,
2836
+ "learning_rate": 3.4940000000000003e-06,
2837
+ "loss": 0.0994,
2838
+ "step": 36300
2839
+ },
2840
+ {
2841
+ "epoch": 6.56,
2842
+ "grad_norm": 2.186110258102417,
2843
+ "learning_rate": 3.4758181818181818e-06,
2844
+ "loss": 0.0991,
2845
+ "step": 36400
2846
+ },
2847
+ {
2848
+ "epoch": 6.58,
2849
+ "grad_norm": 1.8428528308868408,
2850
+ "learning_rate": 3.457636363636364e-06,
2851
+ "loss": 0.1004,
2852
+ "step": 36500
2853
+ },
2854
+ {
2855
+ "epoch": 6.59,
2856
+ "grad_norm": 2.029137372970581,
2857
+ "learning_rate": 3.439454545454546e-06,
2858
+ "loss": 0.1017,
2859
+ "step": 36600
2860
+ },
2861
+ {
2862
+ "epoch": 6.61,
2863
+ "grad_norm": 2.730164051055908,
2864
+ "learning_rate": 3.4212727272727277e-06,
2865
+ "loss": 0.105,
2866
+ "step": 36700
2867
+ },
2868
+ {
2869
+ "epoch": 6.63,
2870
+ "grad_norm": 2.557441473007202,
2871
+ "learning_rate": 3.403090909090909e-06,
2872
+ "loss": 0.1,
2873
+ "step": 36800
2874
+ },
2875
+ {
2876
+ "epoch": 6.65,
2877
+ "grad_norm": 1.9623521566390991,
2878
+ "learning_rate": 3.3850909090909095e-06,
2879
+ "loss": 0.1023,
2880
+ "step": 36900
2881
+ },
2882
+ {
2883
+ "epoch": 6.67,
2884
+ "grad_norm": 2.96305513381958,
2885
+ "learning_rate": 3.3669090909090913e-06,
2886
+ "loss": 0.1043,
2887
+ "step": 37000
2888
+ },
2889
+ {
2890
+ "epoch": 6.67,
2891
+ "eval_loss": 0.18738530576229095,
2892
+ "eval_runtime": 561.774,
2893
+ "eval_samples_per_second": 108.359,
2894
+ "eval_steps_per_second": 3.387,
2895
+ "step": 37000
2896
+ },
2897
+ {
2898
+ "epoch": 6.68,
2899
+ "grad_norm": 2.373506784439087,
2900
+ "learning_rate": 3.348727272727273e-06,
2901
+ "loss": 0.1026,
2902
+ "step": 37100
2903
+ },
2904
+ {
2905
+ "epoch": 6.7,
2906
+ "grad_norm": 2.3987481594085693,
2907
+ "learning_rate": 3.3305454545454545e-06,
2908
+ "loss": 0.0965,
2909
+ "step": 37200
2910
+ },
2911
+ {
2912
+ "epoch": 6.72,
2913
+ "grad_norm": 2.418612003326416,
2914
+ "learning_rate": 3.312363636363637e-06,
2915
+ "loss": 0.1033,
2916
+ "step": 37300
2917
+ },
2918
+ {
2919
+ "epoch": 6.74,
2920
+ "grad_norm": 2.2459537982940674,
2921
+ "learning_rate": 3.2941818181818186e-06,
2922
+ "loss": 0.0968,
2923
+ "step": 37400
2924
+ },
2925
+ {
2926
+ "epoch": 6.76,
2927
+ "grad_norm": 2.8029379844665527,
2928
+ "learning_rate": 3.2760000000000005e-06,
2929
+ "loss": 0.1077,
2930
+ "step": 37500
2931
+ },
2932
+ {
2933
+ "epoch": 6.77,
2934
+ "grad_norm": 2.472376823425293,
2935
+ "learning_rate": 3.257818181818182e-06,
2936
+ "loss": 0.1055,
2937
+ "step": 37600
2938
+ },
2939
+ {
2940
+ "epoch": 6.79,
2941
+ "grad_norm": 2.410263776779175,
2942
+ "learning_rate": 3.2396363636363637e-06,
2943
+ "loss": 0.0975,
2944
+ "step": 37700
2945
+ },
2946
+ {
2947
+ "epoch": 6.81,
2948
+ "grad_norm": 2.254673719406128,
2949
+ "learning_rate": 3.221454545454546e-06,
2950
+ "loss": 0.0997,
2951
+ "step": 37800
2952
+ },
2953
+ {
2954
+ "epoch": 6.83,
2955
+ "grad_norm": 2.2963709831237793,
2956
+ "learning_rate": 3.203272727272728e-06,
2957
+ "loss": 0.0991,
2958
+ "step": 37900
2959
+ },
2960
+ {
2961
+ "epoch": 6.85,
2962
+ "grad_norm": 2.1210105419158936,
2963
+ "learning_rate": 3.1850909090909093e-06,
2964
+ "loss": 0.0988,
2965
+ "step": 38000
2966
+ },
2967
+ {
2968
+ "epoch": 6.85,
2969
+ "eval_loss": 0.18627458810806274,
2970
+ "eval_runtime": 562.6423,
2971
+ "eval_samples_per_second": 108.191,
2972
+ "eval_steps_per_second": 3.382,
2973
+ "step": 38000
2974
+ }
2975
+ ],
2976
+ "logging_steps": 100,
2977
+ "max_steps": 55500,
2978
+ "num_input_tokens_seen": 0,
2979
+ "num_train_epochs": 10,
2980
+ "save_steps": 1000,
2981
+ "total_flos": 1.577395142823143e+20,
2982
+ "train_batch_size": 16,
2983
+ "trial_name": null,
2984
+ "trial_params": null
2985
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff