Oblivion208 committed on
Commit 631be2e
1 Parent(s): 19c9f4e

Upload folder using huggingface_hub
README.md ADDED
@@ -0,0 +1,21 @@
+ ---
+ library_name: peft
+ ---
+ ## Training procedure
+
+
+ The following `bitsandbytes` quantization config was used during training:
+ - quant_method: bitsandbytes
+ - load_in_8bit: True
+ - load_in_4bit: False
+ - llm_int8_threshold: 6.0
+ - llm_int8_skip_modules: None
+ - llm_int8_enable_fp32_cpu_offload: False
+ - llm_int8_has_fp16_weight: False
+ - bnb_4bit_quant_type: fp4
+ - bnb_4bit_use_double_quant: False
+ - bnb_4bit_compute_dtype: float32
+ ### Framework versions
+
+
+ - PEFT 0.6.0.dev0
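The README lists the `bitsandbytes` settings only as bullet points. As a rough illustration (not part of the repo), the same configuration could be written as a `transformers` `BitsAndBytesConfig` when loading the base model named in `adapter_config.json` below:

```python
# Illustrative sketch only: the quantization settings from the README expressed
# as a BitsAndBytesConfig and applied to the Whisper base model.
import torch
from transformers import BitsAndBytesConfig, WhisperForConditionalGeneration

bnb_config = BitsAndBytesConfig(
    load_in_8bit=True,
    load_in_4bit=False,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
    bnb_4bit_quant_type="fp4",
    bnb_4bit_use_double_quant=False,
    bnb_4bit_compute_dtype=torch.float32,
)

base_model = WhisperForConditionalGeneration.from_pretrained(
    "openai/whisper-large-v2",
    quantization_config=bnb_config,
    device_map="auto",  # assumes accelerate and bitsandbytes are installed
)
```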
adapter_config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "auto_mapping": {
+     "base_model_class": "WhisperForConditionalGeneration",
+     "parent_library": "transformers.models.whisper.modeling_whisper"
+   },
+   "base_model_name_or_path": "openai/whisper-large-v2",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "lora_alpha": 64,
+   "lora_dropout": 0.05,
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 32,
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "v_proj"
+   ],
+   "task_type": null
+ }
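The adapter config declares a LoRA adapter (r=32, lora_alpha=64, dropout 0.05) over Whisper's `q_proj` and `v_proj` projections. A minimal loading sketch, assuming `peft` is installed and reusing `base_model` from the snippet above; the repo id is a placeholder:

```python
# Illustrative sketch only: attach this repo's LoRA adapter to the 8-bit base model.
from peft import PeftModel

model = PeftModel.from_pretrained(base_model, "Oblivion208/<this-repo>")  # placeholder repo id
model.eval()  # adapter_config.json sets inference_mode: true
```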
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21944bdd09847d249a5f79f4206c3d90b5a21378be958ed96bf41cba7ff94cad
+ size 63056269
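`adapter_model.bin` is stored via Git LFS, so the diff shows only the pointer file. A small sketch, assuming the binary has been downloaded locally, that checks the file against the pointer's size and sha256:

```python
# Illustrative sketch only: verify a locally downloaded adapter_model.bin against
# the Git LFS pointer shown above. The local path is a placeholder.
import hashlib
from pathlib import Path

weights = Path("adapter_model.bin")
assert weights.stat().st_size == 63056269
digest = hashlib.sha256(weights.read_bytes()).hexdigest()
assert digest == "21944bdd09847d249a5f79f4206c3d90b5a21378be958ed96bf41cba7ff94cad"
```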
added_tokens.json ADDED
@@ -0,0 +1,108 @@
+ {
+   "<|af|>": 50327,
+   "<|am|>": 50334,
+   "<|ar|>": 50272,
+   "<|as|>": 50350,
+   "<|az|>": 50304,
+   "<|ba|>": 50355,
+   "<|be|>": 50330,
+   "<|bg|>": 50292,
+   "<|bn|>": 50302,
+   "<|bo|>": 50347,
+   "<|br|>": 50309,
+   "<|bs|>": 50315,
+   "<|ca|>": 50270,
+   "<|cs|>": 50283,
+   "<|cy|>": 50297,
+   "<|da|>": 50285,
+   "<|de|>": 50261,
+   "<|el|>": 50281,
+   "<|en|>": 50259,
+   "<|es|>": 50262,
+   "<|et|>": 50307,
+   "<|eu|>": 50310,
+   "<|fa|>": 50300,
+   "<|fi|>": 50277,
+   "<|fo|>": 50338,
+   "<|fr|>": 50265,
+   "<|gl|>": 50319,
+   "<|gu|>": 50333,
+   "<|haw|>": 50352,
+   "<|ha|>": 50354,
+   "<|he|>": 50279,
+   "<|hi|>": 50276,
+   "<|hr|>": 50291,
+   "<|ht|>": 50339,
+   "<|hu|>": 50286,
+   "<|hy|>": 50312,
+   "<|id|>": 50275,
+   "<|is|>": 50311,
+   "<|it|>": 50274,
+   "<|ja|>": 50266,
+   "<|jw|>": 50356,
+   "<|ka|>": 50329,
+   "<|kk|>": 50316,
+   "<|km|>": 50323,
+   "<|kn|>": 50306,
+   "<|ko|>": 50264,
+   "<|la|>": 50294,
+   "<|lb|>": 50345,
+   "<|ln|>": 50353,
+   "<|lo|>": 50336,
+   "<|lt|>": 50293,
+   "<|lv|>": 50301,
+   "<|mg|>": 50349,
+   "<|mi|>": 50295,
+   "<|mk|>": 50308,
+   "<|ml|>": 50296,
+   "<|mn|>": 50314,
+   "<|mr|>": 50320,
+   "<|ms|>": 50282,
+   "<|mt|>": 50343,
+   "<|my|>": 50346,
+   "<|ne|>": 50313,
+   "<|nl|>": 50271,
+   "<|nn|>": 50342,
+   "<|nocaptions|>": 50362,
+   "<|notimestamps|>": 50363,
+   "<|no|>": 50288,
+   "<|oc|>": 50328,
+   "<|pa|>": 50321,
+   "<|pl|>": 50269,
+   "<|ps|>": 50340,
+   "<|pt|>": 50267,
+   "<|ro|>": 50284,
+   "<|ru|>": 50263,
+   "<|sa|>": 50344,
+   "<|sd|>": 50332,
+   "<|si|>": 50322,
+   "<|sk|>": 50298,
+   "<|sl|>": 50305,
+   "<|sn|>": 50324,
+   "<|so|>": 50326,
+   "<|sq|>": 50317,
+   "<|sr|>": 50303,
+   "<|startoflm|>": 50360,
+   "<|startofprev|>": 50361,
+   "<|startoftranscript|>": 50258,
+   "<|su|>": 50357,
+   "<|sv|>": 50273,
+   "<|sw|>": 50318,
+   "<|ta|>": 50287,
+   "<|te|>": 50299,
+   "<|tg|>": 50331,
+   "<|th|>": 50289,
+   "<|tk|>": 50341,
+   "<|tl|>": 50348,
+   "<|transcribe|>": 50359,
+   "<|translate|>": 50358,
+   "<|tr|>": 50268,
+   "<|tt|>": 50351,
+   "<|uk|>": 50280,
+   "<|ur|>": 50290,
+   "<|uz|>": 50337,
+   "<|vi|>": 50278,
+   "<|yi|>": 50335,
+   "<|yo|>": 50325,
+   "<|zh|>": 50260
+ }
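`added_tokens.json` maps Whisper's special language and task tokens to vocabulary ids. A quick sketch, assuming the tokenizer files in this repo match `openai/whisper-large-v2`, showing the same ids resolved through the tokenizer:

```python
# Illustrative sketch only: resolve the special tokens listed above via the tokenizer.
from transformers import WhisperTokenizer

tokenizer = WhisperTokenizer.from_pretrained("openai/whisper-large-v2")
print(tokenizer.convert_tokens_to_ids("<|zh|>"))          # 50260
print(tokenizer.convert_tokens_to_ids("<|transcribe|>"))  # 50359
```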
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
normalizer.json ADDED
@@ -0,0 +1,1742 @@
1
+ {
2
+ "accessorise": "accessorize",
3
+ "accessorised": "accessorized",
4
+ "accessorises": "accessorizes",
5
+ "accessorising": "accessorizing",
6
+ "acclimatisation": "acclimatization",
7
+ "acclimatise": "acclimatize",
8
+ "acclimatised": "acclimatized",
9
+ "acclimatises": "acclimatizes",
10
+ "acclimatising": "acclimatizing",
11
+ "accoutrements": "accouterments",
12
+ "aeon": "eon",
13
+ "aeons": "eons",
14
+ "aerogramme": "aerogram",
15
+ "aerogrammes": "aerograms",
16
+ "aeroplane": "airplane",
17
+ "aeroplanes": "airplanes",
18
+ "aesthete": "esthete",
19
+ "aesthetes": "esthetes",
20
+ "aesthetic": "esthetic",
21
+ "aesthetically": "esthetically",
22
+ "aesthetics": "esthetics",
23
+ "aetiology": "etiology",
24
+ "ageing": "aging",
25
+ "aggrandisement": "aggrandizement",
26
+ "agonise": "agonize",
27
+ "agonised": "agonized",
28
+ "agonises": "agonizes",
29
+ "agonising": "agonizing",
30
+ "agonisingly": "agonizingly",
31
+ "almanack": "almanac",
32
+ "almanacks": "almanacs",
33
+ "aluminium": "aluminum",
34
+ "amortisable": "amortizable",
35
+ "amortisation": "amortization",
36
+ "amortisations": "amortizations",
37
+ "amortise": "amortize",
38
+ "amortised": "amortized",
39
+ "amortises": "amortizes",
40
+ "amortising": "amortizing",
41
+ "amphitheatre": "amphitheater",
42
+ "amphitheatres": "amphitheaters",
43
+ "anaemia": "anemia",
44
+ "anaemic": "anemic",
45
+ "anaesthesia": "anesthesia",
46
+ "anaesthetic": "anesthetic",
47
+ "anaesthetics": "anesthetics",
48
+ "anaesthetise": "anesthetize",
49
+ "anaesthetised": "anesthetized",
50
+ "anaesthetises": "anesthetizes",
51
+ "anaesthetising": "anesthetizing",
52
+ "anaesthetist": "anesthetist",
53
+ "anaesthetists": "anesthetists",
54
+ "anaesthetize": "anesthetize",
55
+ "anaesthetized": "anesthetized",
56
+ "anaesthetizes": "anesthetizes",
57
+ "anaesthetizing": "anesthetizing",
58
+ "analogue": "analog",
59
+ "analogues": "analogs",
60
+ "analyse": "analyze",
61
+ "analysed": "analyzed",
62
+ "analyses": "analyzes",
63
+ "analysing": "analyzing",
64
+ "anglicise": "anglicize",
65
+ "anglicised": "anglicized",
66
+ "anglicises": "anglicizes",
67
+ "anglicising": "anglicizing",
68
+ "annualised": "annualized",
69
+ "antagonise": "antagonize",
70
+ "antagonised": "antagonized",
71
+ "antagonises": "antagonizes",
72
+ "antagonising": "antagonizing",
73
+ "apologise": "apologize",
74
+ "apologised": "apologized",
75
+ "apologises": "apologizes",
76
+ "apologising": "apologizing",
77
+ "appal": "appall",
78
+ "appals": "appalls",
79
+ "appetiser": "appetizer",
80
+ "appetisers": "appetizers",
81
+ "appetising": "appetizing",
82
+ "appetisingly": "appetizingly",
83
+ "arbour": "arbor",
84
+ "arbours": "arbors",
85
+ "archaeologically": "archeologically",
86
+ "archaeologist": "archeologist",
87
+ "archaeologists": "archeologists",
88
+ "archaeology": "archeology</span>",
89
+ "archeological": "archaeological",
90
+ "ardour": "ardor",
91
+ "armour": "armor",
92
+ "armoured": "armored",
93
+ "armourer": "armorer",
94
+ "armourers": "armorers",
95
+ "armouries": "armories",
96
+ "armoury": "armory",
97
+ "artefact": "artifact",
98
+ "artefacts": "artifacts",
99
+ "authorise": "authorize",
100
+ "authorised": "authorized",
101
+ "authorises": "authorizes",
102
+ "authorising": "authorizing",
103
+ "axe": "ax",
104
+ "backpedalled": "backpedaled",
105
+ "backpedalling": "backpedaling",
106
+ "bannister": "banister",
107
+ "bannisters": "banisters",
108
+ "baptise": "baptize",
109
+ "baptised": "baptized",
110
+ "baptises": "baptizes",
111
+ "baptising": "baptizing",
112
+ "bastardise": "bastardize",
113
+ "bastardised": "bastardized",
114
+ "bastardises": "bastardizes",
115
+ "bastardising": "bastardizing",
116
+ "battleax": "battleaxe",
117
+ "baulk": "balk",
118
+ "baulked": "balked",
119
+ "baulking": "balking",
120
+ "baulks": "balks",
121
+ "bedevilled": "bedeviled",
122
+ "bedevilling": "bedeviling",
123
+ "behaviour": "behavior",
124
+ "behavioural": "behavioral",
125
+ "behaviourism": "behaviorism",
126
+ "behaviourist": "behaviorist",
127
+ "behaviourists": "behaviorists",
128
+ "behaviours": "behaviors",
129
+ "behove": "behoove",
130
+ "behoved": "behooved",
131
+ "behoves": "behooves",
132
+ "bejewelled": "bejeweled",
133
+ "belabour": "belabor",
134
+ "belaboured": "belabored",
135
+ "belabouring": "belaboring",
136
+ "belabours": "belabors",
137
+ "bevelled": "beveled",
138
+ "bevvies": "bevies",
139
+ "bevvy": "bevy",
140
+ "biassed": "biased",
141
+ "biassing": "biasing",
142
+ "bingeing": "binging",
143
+ "bougainvillaea": "bougainvillea",
144
+ "bougainvillaeas": "bougainvilleas",
145
+ "bowdlerise": "bowdlerize",
146
+ "bowdlerised": "bowdlerized",
147
+ "bowdlerises": "bowdlerizes",
148
+ "bowdlerising": "bowdlerizing",
149
+ "breathalyse": "breathalyze",
150
+ "breathalysed": "breathalyzed",
151
+ "breathalyser": "breathalyzer",
152
+ "breathalysers": "breathalyzers",
153
+ "breathalyses": "breathalyzes",
154
+ "breathalysing": "breathalyzing",
155
+ "brutalise": "brutalize",
156
+ "brutalised": "brutalized",
157
+ "brutalises": "brutalizes",
158
+ "brutalising": "brutalizing",
159
+ "busses": "buses",
160
+ "bussing": "busing",
161
+ "caesarean": "cesarean",
162
+ "caesareans": "cesareans",
163
+ "calibre": "caliber",
164
+ "calibres": "calibers",
165
+ "calliper": "caliper",
166
+ "callipers": "calipers",
167
+ "callisthenics": "calisthenics",
168
+ "canalise": "canalize",
169
+ "canalised": "canalized",
170
+ "canalises": "canalizes",
171
+ "canalising": "canalizing",
172
+ "cancelation": "cancellation",
173
+ "cancelations": "cancellations",
174
+ "cancelled": "canceled",
175
+ "cancelling": "canceling",
176
+ "candour": "candor",
177
+ "cannibalise": "cannibalize",
178
+ "cannibalised": "cannibalized",
179
+ "cannibalises": "cannibalizes",
180
+ "cannibalising": "cannibalizing",
181
+ "canonise": "canonize",
182
+ "canonised": "canonized",
183
+ "canonises": "canonizes",
184
+ "canonising": "canonizing",
185
+ "capitalise": "capitalize",
186
+ "capitalised": "capitalized",
187
+ "capitalises": "capitalizes",
188
+ "capitalising": "capitalizing",
189
+ "caramelise": "caramelize",
190
+ "caramelised": "caramelized",
191
+ "caramelises": "caramelizes",
192
+ "caramelising": "caramelizing",
193
+ "carbonise": "carbonize",
194
+ "carbonised": "carbonized",
195
+ "carbonises": "carbonizes",
196
+ "carbonising": "carbonizing",
197
+ "carolled": "caroled",
198
+ "carolling": "caroling",
199
+ "catalogue": "catalog",
200
+ "catalogued": "cataloged",
201
+ "catalogues": "catalogs",
202
+ "cataloguing": "cataloging",
203
+ "catalyse": "catalyze",
204
+ "catalysed": "catalyzed",
205
+ "catalyses": "catalyzes",
206
+ "catalysing": "catalyzing",
207
+ "categorise": "categorize",
208
+ "categorised": "categorized",
209
+ "categorises": "categorizes",
210
+ "categorising": "categorizing",
211
+ "cauterise": "cauterize",
212
+ "cauterised": "cauterized",
213
+ "cauterises": "cauterizes",
214
+ "cauterising": "cauterizing",
215
+ "cavilled": "caviled",
216
+ "cavilling": "caviling",
217
+ "centigramme": "centigram",
218
+ "centigrammes": "centigrams",
219
+ "centilitre": "centiliter",
220
+ "centilitres": "centiliters",
221
+ "centimetre": "centimeter",
222
+ "centimetres": "centimeters",
223
+ "centralise": "centralize",
224
+ "centralised": "centralized",
225
+ "centralises": "centralizes",
226
+ "centralising": "centralizing",
227
+ "centre": "center",
228
+ "centred": "centered",
229
+ "centrefold": "centerfold",
230
+ "centrefolds": "centerfolds",
231
+ "centrepiece": "centerpiece",
232
+ "centrepieces": "centerpieces",
233
+ "centres": "centers",
234
+ "channelled": "channeled",
235
+ "channelling": "channeling",
236
+ "characterise": "characterize",
237
+ "characterised": "characterized",
238
+ "characterises": "characterizes",
239
+ "characterising": "characterizing",
240
+ "cheque": "check",
241
+ "chequebook": "checkbook",
242
+ "chequebooks": "checkbooks",
243
+ "chequered": "checkered",
244
+ "cheques": "checks",
245
+ "chilli": "chili",
246
+ "chimaera": "chimera",
247
+ "chimaeras": "chimeras",
248
+ "chiselled": "chiseled",
249
+ "chiselling": "chiseling",
250
+ "circularise": "circularize",
251
+ "circularised": "circularized",
252
+ "circularises": "circularizes",
253
+ "circularising": "circularizing",
254
+ "civilise": "civilize",
255
+ "civilised": "civilized",
256
+ "civilises": "civilizes",
257
+ "civilising": "civilizing",
258
+ "clamour": "clamor",
259
+ "clamoured": "clamored",
260
+ "clamouring": "clamoring",
261
+ "clamours": "clamors",
262
+ "clangour": "clangor",
263
+ "clarinettist": "clarinetist",
264
+ "clarinettists": "clarinetists",
265
+ "collectivise": "collectivize",
266
+ "collectivised": "collectivized",
267
+ "collectivises": "collectivizes",
268
+ "collectivising": "collectivizing",
269
+ "colonisation": "colonization",
270
+ "colonise": "colonize",
271
+ "colonised": "colonized",
272
+ "coloniser": "colonizer",
273
+ "colonisers": "colonizers",
274
+ "colonises": "colonizes",
275
+ "colonising": "colonizing",
276
+ "colour": "color",
277
+ "colourant": "colorant",
278
+ "colourants": "colorants",
279
+ "coloured": "colored",
280
+ "coloureds": "coloreds",
281
+ "colourful": "colorful",
282
+ "colourfully": "colorfully",
283
+ "colouring": "coloring",
284
+ "colourize": "colorize",
285
+ "colourized": "colorized",
286
+ "colourizes": "colorizes",
287
+ "colourizing": "colorizing",
288
+ "colourless": "colorless",
289
+ "colours": "colors",
290
+ "commercialise": "commercialize",
291
+ "commercialised": "commercialized",
292
+ "commercialises": "commercializes",
293
+ "commercialising": "commercializing",
294
+ "compartmentalise": "compartmentalize",
295
+ "compartmentalised": "compartmentalized",
296
+ "compartmentalises": "compartmentalizes",
297
+ "compartmentalising": "compartmentalizing",
298
+ "computerise": "computerize",
299
+ "computerised": "computerized",
300
+ "computerises": "computerizes",
301
+ "computerising": "computerizing",
302
+ "conceptualise": "conceptualize",
303
+ "conceptualised": "conceptualized",
304
+ "conceptualises": "conceptualizes",
305
+ "conceptualising": "conceptualizing",
306
+ "connexion": "connection",
307
+ "connexions": "connections",
308
+ "contextualise": "contextualize",
309
+ "contextualised": "contextualized",
310
+ "contextualises": "contextualizes",
311
+ "contextualising": "contextualizing",
312
+ "cosier": "cozier",
313
+ "cosies": "cozies",
314
+ "cosiest": "coziest",
315
+ "cosily": "cozily",
316
+ "cosiness": "coziness",
317
+ "cosy": "cozy",
318
+ "councillor": "councilor",
319
+ "councillors": "councilors",
320
+ "counselled": "counseled",
321
+ "counselling": "counseling",
322
+ "counsellor": "counselor",
323
+ "counsellors": "counselors",
324
+ "crenelated": "crenellated",
325
+ "criminalise": "criminalize",
326
+ "criminalised": "criminalized",
327
+ "criminalises": "criminalizes",
328
+ "criminalising": "criminalizing",
329
+ "criticise": "criticize",
330
+ "criticised": "criticized",
331
+ "criticises": "criticizes",
332
+ "criticising": "criticizing",
333
+ "crueller": "crueler",
334
+ "cruellest": "cruelest",
335
+ "crystallisation": "crystallization",
336
+ "crystallise": "crystallize",
337
+ "crystallised": "crystallized",
338
+ "crystallises": "crystallizes",
339
+ "crystallising": "crystallizing",
340
+ "cudgelled": "cudgeled",
341
+ "cudgelling": "cudgeling",
342
+ "customise": "customize",
343
+ "customised": "customized",
344
+ "customises": "customizes",
345
+ "customising": "customizing",
346
+ "cypher": "cipher",
347
+ "cyphers": "ciphers",
348
+ "decentralisation": "decentralization",
349
+ "decentralise": "decentralize",
350
+ "decentralised": "decentralized",
351
+ "decentralises": "decentralizes",
352
+ "decentralising": "decentralizing",
353
+ "decriminalisation": "decriminalization",
354
+ "decriminalise": "decriminalize",
355
+ "decriminalised": "decriminalized",
356
+ "decriminalises": "decriminalizes",
357
+ "decriminalising": "decriminalizing",
358
+ "defence": "defense",
359
+ "defenceless": "defenseless",
360
+ "defences": "defenses",
361
+ "dehumanisation": "dehumanization",
362
+ "dehumanise": "dehumanize",
363
+ "dehumanised": "dehumanized",
364
+ "dehumanises": "dehumanizes",
365
+ "dehumanising": "dehumanizing",
366
+ "demeanour": "demeanor",
367
+ "demilitarisation": "demilitarization",
368
+ "demilitarise": "demilitarize",
369
+ "demilitarised": "demilitarized",
370
+ "demilitarises": "demilitarizes",
371
+ "demilitarising": "demilitarizing",
372
+ "demobilisation": "demobilization",
373
+ "demobilise": "demobilize",
374
+ "demobilised": "demobilized",
375
+ "demobilises": "demobilizes",
376
+ "demobilising": "demobilizing",
377
+ "democratisation": "democratization",
378
+ "democratise": "democratize",
379
+ "democratised": "democratized",
380
+ "democratises": "democratizes",
381
+ "democratising": "democratizing",
382
+ "demonise": "demonize",
383
+ "demonised": "demonized",
384
+ "demonises": "demonizes",
385
+ "demonising": "demonizing",
386
+ "demoralisation": "demoralization",
387
+ "demoralise": "demoralize",
388
+ "demoralised": "demoralized",
389
+ "demoralises": "demoralizes",
390
+ "demoralising": "demoralizing",
391
+ "denationalisation": "denationalization",
392
+ "denationalise": "denationalize",
393
+ "denationalised": "denationalized",
394
+ "denationalises": "denationalizes",
395
+ "denationalising": "denationalizing",
396
+ "deodorise": "deodorize",
397
+ "deodorised": "deodorized",
398
+ "deodorises": "deodorizes",
399
+ "deodorising": "deodorizing",
400
+ "depersonalise": "depersonalize",
401
+ "depersonalised": "depersonalized",
402
+ "depersonalises": "depersonalizes",
403
+ "depersonalising": "depersonalizing",
404
+ "deputise": "deputize",
405
+ "deputised": "deputized",
406
+ "deputises": "deputizes",
407
+ "deputising": "deputizing",
408
+ "desensitisation": "desensitization",
409
+ "desensitise": "desensitize",
410
+ "desensitised": "desensitized",
411
+ "desensitises": "desensitizes",
412
+ "desensitising": "desensitizing",
413
+ "destabilisation": "destabilization",
414
+ "destabilise": "destabilize",
415
+ "destabilised": "destabilized",
416
+ "destabilises": "destabilizes",
417
+ "destabilising": "destabilizing",
418
+ "dialled": "dialed",
419
+ "dialling": "dialing",
420
+ "dialogue": "dialog",
421
+ "dialogues": "dialogs",
422
+ "diarrhoea": "diarrhea",
423
+ "digitise": "digitize",
424
+ "digitised": "digitized",
425
+ "digitises": "digitizes",
426
+ "digitising": "digitizing",
427
+ "disc": "disk",
428
+ "discolour": "discolor",
429
+ "discoloured": "discolored",
430
+ "discolouring": "discoloring",
431
+ "discolours": "discolors",
432
+ "discs": "disks",
433
+ "disembowelled": "disemboweled",
434
+ "disembowelling": "disemboweling",
435
+ "disfavour": "disfavor",
436
+ "dishevelled": "disheveled",
437
+ "dishonour": "dishonor",
438
+ "dishonourable": "dishonorable",
439
+ "dishonourably": "dishonorably",
440
+ "dishonoured": "dishonored",
441
+ "dishonouring": "dishonoring",
442
+ "dishonours": "dishonors",
443
+ "disorganisation": "disorganization",
444
+ "disorganised": "disorganized",
445
+ "distil": "distill",
446
+ "distils": "distills",
447
+ "dramatisation": "dramatization",
448
+ "dramatisations": "dramatizations",
449
+ "dramatise": "dramatize",
450
+ "dramatised": "dramatized",
451
+ "dramatises": "dramatizes",
452
+ "dramatising": "dramatizing",
453
+ "draught": "draft",
454
+ "draughtboard": "draftboard",
455
+ "draughtboards": "draftboards",
456
+ "draughtier": "draftier",
457
+ "draughtiest": "draftiest",
458
+ "draughts": "drafts",
459
+ "draughtsman": "draftsman",
460
+ "draughtsmanship": "draftsmanship",
461
+ "draughtsmen": "draftsmen",
462
+ "draughtswoman": "draftswoman",
463
+ "draughtswomen": "draftswomen",
464
+ "draughty": "drafty",
465
+ "drivelled": "driveled",
466
+ "drivelling": "driveling",
467
+ "duelled": "dueled",
468
+ "duelling": "dueling",
469
+ "economise": "economize",
470
+ "economised": "economized",
471
+ "economises": "economizes",
472
+ "economising": "economizing",
473
+ "editorialise": "editorialize",
474
+ "editorialised": "editorialized",
475
+ "editorialises": "editorializes",
476
+ "editorialising": "editorializing",
477
+ "edoema": "edema",
478
+ "empathise": "empathize",
479
+ "empathised": "empathized",
480
+ "empathises": "empathizes",
481
+ "empathising": "empathizing",
482
+ "emphasise": "emphasize",
483
+ "emphasised": "emphasized",
484
+ "emphasises": "emphasizes",
485
+ "emphasising": "emphasizing",
486
+ "enamelled": "enameled",
487
+ "enamelling": "enameling",
488
+ "enamoured": "enamored",
489
+ "encyclopaedia": "encyclopedia",
490
+ "encyclopaedias": "encyclopedias",
491
+ "encyclopaedic": "encyclopedic",
492
+ "endeavour": "endeavor",
493
+ "endeavoured": "endeavored",
494
+ "endeavouring": "endeavoring",
495
+ "endeavours": "endeavors",
496
+ "energise": "energize",
497
+ "energised": "energized",
498
+ "energises": "energizes",
499
+ "energising": "energizing",
500
+ "enrol": "enroll",
501
+ "enrols": "enrolls",
502
+ "enthral": "enthrall",
503
+ "enthrals": "enthralls",
504
+ "epaulette": "epaulet",
505
+ "epaulettes": "epaulets",
506
+ "epicentre": "epicenter",
507
+ "epicentres": "epicenters",
508
+ "epilogue": "epilog",
509
+ "epilogues": "epilogs",
510
+ "epitomise": "epitomize",
511
+ "epitomised": "epitomized",
512
+ "epitomises": "epitomizes",
513
+ "epitomising": "epitomizing",
514
+ "equalisation": "equalization",
515
+ "equalise": "equalize",
516
+ "equalised": "equalized",
517
+ "equaliser": "equalizer",
518
+ "equalisers": "equalizers",
519
+ "equalises": "equalizes",
520
+ "equalising": "equalizing",
521
+ "eulogise": "eulogize",
522
+ "eulogised": "eulogized",
523
+ "eulogises": "eulogizes",
524
+ "eulogising": "eulogizing",
525
+ "evangelise": "evangelize",
526
+ "evangelised": "evangelized",
527
+ "evangelises": "evangelizes",
528
+ "evangelising": "evangelizing",
529
+ "exorcise": "exorcize",
530
+ "exorcised": "exorcized",
531
+ "exorcises": "exorcizes",
532
+ "exorcising": "exorcizing",
533
+ "extemporisation": "extemporization",
534
+ "extemporise": "extemporize",
535
+ "extemporised": "extemporized",
536
+ "extemporises": "extemporizes",
537
+ "extemporising": "extemporizing",
538
+ "externalisation": "externalization",
539
+ "externalisations": "externalizations",
540
+ "externalise": "externalize",
541
+ "externalised": "externalized",
542
+ "externalises": "externalizes",
543
+ "externalising": "externalizing",
544
+ "factorise": "factorize",
545
+ "factorised": "factorized",
546
+ "factorises": "factorizes",
547
+ "factorising": "factorizing",
548
+ "faecal": "fecal",
549
+ "faeces": "feces",
550
+ "familiarisation": "familiarization",
551
+ "familiarise": "familiarize",
552
+ "familiarised": "familiarized",
553
+ "familiarises": "familiarizes",
554
+ "familiarising": "familiarizing",
555
+ "fantasise": "fantasize",
556
+ "fantasised": "fantasized",
557
+ "fantasises": "fantasizes",
558
+ "fantasising": "fantasizing",
559
+ "favour": "favor",
560
+ "favourable": "favorable",
561
+ "favourably": "favorably",
562
+ "favoured": "favored",
563
+ "favouring": "favoring",
564
+ "favourite": "favorite",
565
+ "favourites": "favorites",
566
+ "favouritism": "favoritism",
567
+ "favours": "favors",
568
+ "feminise": "feminize",
569
+ "feminised": "feminized",
570
+ "feminises": "feminizes",
571
+ "feminising": "feminizing",
572
+ "fertilisation": "fertilization",
573
+ "fertilise": "fertilize",
574
+ "fertilised": "fertilized",
575
+ "fertiliser": "fertilizer",
576
+ "fertilisers": "fertilizers",
577
+ "fertilises": "fertilizes",
578
+ "fertilising": "fertilizing",
579
+ "fervour": "fervor",
580
+ "fibre": "fiber",
581
+ "fibreglass": "fiberglass",
582
+ "fibres": "fibers",
583
+ "fictionalisation": "fictionalization",
584
+ "fictionalisations": "fictionalizations",
585
+ "fictionalise": "fictionalize",
586
+ "fictionalised": "fictionalized",
587
+ "fictionalises": "fictionalizes",
588
+ "fictionalising": "fictionalizing",
589
+ "fillet": "filet",
590
+ "filleted": "fileted",
591
+ "filleting": "fileting",
592
+ "fillets": "filets",
593
+ "finalisation": "finalization",
594
+ "finalise": "finalize",
595
+ "finalised": "finalized",
596
+ "finalises": "finalizes",
597
+ "finalising": "finalizing",
598
+ "flautist": "flutist",
599
+ "flautists": "flutists",
600
+ "flavour": "flavor",
601
+ "flavoured": "flavored",
602
+ "flavouring": "flavoring",
603
+ "flavourings": "flavorings",
604
+ "flavourless": "flavorless",
605
+ "flavours": "flavors",
606
+ "flavoursome": "flavorsome",
607
+ "flyer / flier": "flier / flyer",
608
+ "foetal": "fetal",
609
+ "foetid": "fetid",
610
+ "foetus": "fetus",
611
+ "foetuses": "fetuses",
612
+ "formalisation": "formalization",
613
+ "formalise": "formalize",
614
+ "formalised": "formalized",
615
+ "formalises": "formalizes",
616
+ "formalising": "formalizing",
617
+ "fossilisation": "fossilization",
618
+ "fossilise": "fossilize",
619
+ "fossilised": "fossilized",
620
+ "fossilises": "fossilizes",
621
+ "fossilising": "fossilizing",
622
+ "fraternisation": "fraternization",
623
+ "fraternise": "fraternize",
624
+ "fraternised": "fraternized",
625
+ "fraternises": "fraternizes",
626
+ "fraternising": "fraternizing",
627
+ "fulfil": "fulfill",
628
+ "fulfilment": "fulfillment",
629
+ "fulfils": "fulfills",
630
+ "funnelled": "funneled",
631
+ "funnelling": "funneling",
632
+ "gage": "gauge",
633
+ "gaged": "gauged",
634
+ "gages": "gauges",
635
+ "gaging": "gauging",
636
+ "galvanise": "galvanize",
637
+ "galvanised": "galvanized",
638
+ "galvanises": "galvanizes",
639
+ "galvanising": "galvanizing",
640
+ "gambolled": "gamboled",
641
+ "gambolling": "gamboling",
642
+ "gaol": "jail",
643
+ "gaolbird": "jailbird",
644
+ "gaolbirds": "jailbirds",
645
+ "gaolbreak": "jailbreak",
646
+ "gaolbreaks": "jailbreaks",
647
+ "gaoled": "jailed",
648
+ "gaoler": "jailer",
649
+ "gaolers": "jailers",
650
+ "gaoling": "jailing",
651
+ "gaols": "jails",
652
+ "gasses": "gases",
653
+ "generalisation": "generalization",
654
+ "generalisations": "generalizations",
655
+ "generalise": "generalize",
656
+ "generalised": "generalized",
657
+ "generalises": "generalizes",
658
+ "generalising": "generalizing",
659
+ "ghettoise": "ghettoize",
660
+ "ghettoised": "ghettoized",
661
+ "ghettoises": "ghettoizes",
662
+ "ghettoising": "ghettoizing",
663
+ "gipsies": "gypsies",
664
+ "glamor": "glamour",
665
+ "glamorise": "glamorize",
666
+ "glamorised": "glamorized",
667
+ "glamorises": "glamorizes",
668
+ "glamorising": "glamorizing",
669
+ "globalisation": "globalization",
670
+ "globalise": "globalize",
671
+ "globalised": "globalized",
672
+ "globalises": "globalizes",
673
+ "globalising": "globalizing",
674
+ "glueing": "gluing",
675
+ "goitre": "goiter",
676
+ "goitres": "goiters",
677
+ "gonorrhoea": "gonorrhea",
678
+ "gramme": "gram",
679
+ "grammes": "grams",
680
+ "gravelled": "graveled",
681
+ "grey": "gray",
682
+ "greyed": "grayed",
683
+ "greying": "graying",
684
+ "greyish": "grayish",
685
+ "greyness": "grayness",
686
+ "greys": "grays",
687
+ "grovelled": "groveled",
688
+ "grovelling": "groveling",
689
+ "groyne": "groin",
690
+ "groynes": "groins",
691
+ "gruelling": "grueling",
692
+ "gruellingly": "gruelingly",
693
+ "gryphon": "griffin",
694
+ "gryphons": "griffins",
695
+ "gynaecological": "gynecological",
696
+ "gynaecologist": "gynecologist",
697
+ "gynaecologists": "gynecologists",
698
+ "gynaecology": "gynecology",
699
+ "haematological": "hematological",
700
+ "haematologist": "hematologist",
701
+ "haematologists": "hematologists",
702
+ "haematology": "hematology",
703
+ "haemoglobin": "hemoglobin",
704
+ "haemophilia": "hemophilia",
705
+ "haemophiliac": "hemophiliac",
706
+ "haemophiliacs": "hemophiliacs",
707
+ "haemorrhage": "hemorrhage",
708
+ "haemorrhaged": "hemorrhaged",
709
+ "haemorrhages": "hemorrhages",
710
+ "haemorrhaging": "hemorrhaging",
711
+ "haemorrhoids": "hemorrhoids",
712
+ "harbour": "harbor",
713
+ "harboured": "harbored",
714
+ "harbouring": "harboring",
715
+ "harbours": "harbors",
716
+ "harmonisation": "harmonization",
717
+ "harmonise": "harmonize",
718
+ "harmonised": "harmonized",
719
+ "harmonises": "harmonizes",
720
+ "harmonising": "harmonizing",
721
+ "homoeopath": "homeopath",
722
+ "homoeopathic": "homeopathic",
723
+ "homoeopaths": "homeopaths",
724
+ "homoeopathy": "homeopathy",
725
+ "homogenise": "homogenize",
726
+ "homogenised": "homogenized",
727
+ "homogenises": "homogenizes",
728
+ "homogenising": "homogenizing",
729
+ "honour": "honor",
730
+ "honourable": "honorable",
731
+ "honourably": "honorably",
732
+ "honoured": "honored",
733
+ "honouring": "honoring",
734
+ "honours": "honors",
735
+ "hospitalisation": "hospitalization",
736
+ "hospitalise": "hospitalize",
737
+ "hospitalised": "hospitalized",
738
+ "hospitalises": "hospitalizes",
739
+ "hospitalising": "hospitalizing",
740
+ "humanise": "humanize",
741
+ "humanised": "humanized",
742
+ "humanises": "humanizes",
743
+ "humanising": "humanizing",
744
+ "humour": "humor",
745
+ "humoured": "humored",
746
+ "humouring": "humoring",
747
+ "humourless": "humorless",
748
+ "humours": "humors",
749
+ "hybridise": "hybridize",
750
+ "hybridised": "hybridized",
751
+ "hybridises": "hybridizes",
752
+ "hybridising": "hybridizing",
753
+ "hypnotise": "hypnotize",
754
+ "hypnotised": "hypnotized",
755
+ "hypnotises": "hypnotizes",
756
+ "hypnotising": "hypnotizing",
757
+ "hypothesise": "hypothesize",
758
+ "hypothesised": "hypothesized",
759
+ "hypothesises": "hypothesizes",
760
+ "hypothesising": "hypothesizing",
761
+ "idealisation": "idealization",
762
+ "idealise": "idealize",
763
+ "idealised": "idealized",
764
+ "idealises": "idealizes",
765
+ "idealising": "idealizing",
766
+ "idolise": "idolize",
767
+ "idolised": "idolized",
768
+ "idolises": "idolizes",
769
+ "idolising": "idolizing",
770
+ "immobilisation": "immobilization",
771
+ "immobilise": "immobilize",
772
+ "immobilised": "immobilized",
773
+ "immobiliser": "immobilizer",
774
+ "immobilisers": "immobilizers",
775
+ "immobilises": "immobilizes",
776
+ "immobilising": "immobilizing",
777
+ "immortalise": "immortalize",
778
+ "immortalised": "immortalized",
779
+ "immortalises": "immortalizes",
780
+ "immortalising": "immortalizing",
781
+ "immunisation": "immunization",
782
+ "immunise": "immunize",
783
+ "immunised": "immunized",
784
+ "immunises": "immunizes",
785
+ "immunising": "immunizing",
786
+ "impanelled": "impaneled",
787
+ "impanelling": "impaneling",
788
+ "imperilled": "imperiled",
789
+ "imperilling": "imperiling",
790
+ "individualise": "individualize",
791
+ "individualised": "individualized",
792
+ "individualises": "individualizes",
793
+ "individualising": "individualizing",
794
+ "industrialise": "industrialize",
795
+ "industrialised": "industrialized",
796
+ "industrialises": "industrializes",
797
+ "industrialising": "industrializing",
798
+ "inflexion": "inflection",
799
+ "inflexions": "inflections",
800
+ "initialise": "initialize",
801
+ "initialised": "initialized",
802
+ "initialises": "initializes",
803
+ "initialising": "initializing",
804
+ "initialled": "initialed",
805
+ "initialling": "initialing",
806
+ "instal": "install",
807
+ "instalment": "installment",
808
+ "instalments": "installments",
809
+ "instals": "installs",
810
+ "instil": "instill",
811
+ "instils": "instills",
812
+ "institutionalisation": "institutionalization",
813
+ "institutionalise": "institutionalize",
814
+ "institutionalised": "institutionalized",
815
+ "institutionalises": "institutionalizes",
816
+ "institutionalising": "institutionalizing",
817
+ "intellectualise": "intellectualize",
818
+ "intellectualised": "intellectualized",
819
+ "intellectualises": "intellectualizes",
820
+ "intellectualising": "intellectualizing",
821
+ "internalisation": "internalization",
822
+ "internalise": "internalize",
823
+ "internalised": "internalized",
824
+ "internalises": "internalizes",
825
+ "internalising": "internalizing",
826
+ "internationalisation": "internationalization",
827
+ "internationalise": "internationalize",
828
+ "internationalised": "internationalized",
829
+ "internationalises": "internationalizes",
830
+ "internationalising": "internationalizing",
831
+ "ionisation": "ionization",
832
+ "ionise": "ionize",
833
+ "ionised": "ionized",
834
+ "ioniser": "ionizer",
835
+ "ionisers": "ionizers",
836
+ "ionises": "ionizes",
837
+ "ionising": "ionizing",
838
+ "italicise": "italicize",
839
+ "italicised": "italicized",
840
+ "italicises": "italicizes",
841
+ "italicising": "italicizing",
842
+ "itemise": "itemize",
843
+ "itemised": "itemized",
844
+ "itemises": "itemizes",
845
+ "itemising": "itemizing",
846
+ "jeopardise": "jeopardize",
847
+ "jeopardised": "jeopardized",
848
+ "jeopardises": "jeopardizes",
849
+ "jeopardising": "jeopardizing",
850
+ "jewelled": "jeweled",
851
+ "jeweller": "jeweler",
852
+ "jewellers": "jewelers",
853
+ "jewellery": "jewelry",
854
+ "judgement": "judgment",
855
+ "kilogramme": "kilogram",
856
+ "kilogrammes": "kilograms",
857
+ "kilometre": "kilometer",
858
+ "kilometres": "kilometers",
859
+ "labelled": "labeled",
860
+ "labelling": "labeling",
861
+ "labour": "labor",
862
+ "laboured": "labored",
863
+ "labourer": "laborer",
864
+ "labourers": "laborers",
865
+ "labouring": "laboring",
866
+ "labours": "labors",
867
+ "lacklustre": "lackluster",
868
+ "legalisation": "legalization",
869
+ "legalise": "legalize",
870
+ "legalised": "legalized",
871
+ "legalises": "legalizes",
872
+ "legalising": "legalizing",
873
+ "legitimise": "legitimize",
874
+ "legitimised": "legitimized",
875
+ "legitimises": "legitimizes",
876
+ "legitimising": "legitimizing",
877
+ "leukaemia": "leukemia",
878
+ "levelled": "leveled",
879
+ "leveller": "leveler",
880
+ "levellers": "levelers",
881
+ "levelling": "leveling",
882
+ "libelled": "libeled",
883
+ "libelling": "libeling",
884
+ "libellous": "libelous",
885
+ "liberalisation": "liberalization",
886
+ "liberalise": "liberalize",
887
+ "liberalised": "liberalized",
888
+ "liberalises": "liberalizes",
889
+ "liberalising": "liberalizing",
890
+ "licence": "license",
891
+ "licenced": "licensed",
892
+ "licences": "licenses",
893
+ "licencing": "licensing",
894
+ "likeable": "likable",
895
+ "lionisation": "lionization",
896
+ "lionise": "lionize",
897
+ "lionised": "lionized",
898
+ "lionises": "lionizes",
899
+ "lionising": "lionizing",
900
+ "liquidise": "liquidize",
901
+ "liquidised": "liquidized",
902
+ "liquidiser": "liquidizer",
903
+ "liquidisers": "liquidizers",
904
+ "liquidises": "liquidizes",
905
+ "liquidising": "liquidizing",
906
+ "litre": "liter",
907
+ "litres": "liters",
908
+ "localise": "localize",
909
+ "localised": "localized",
910
+ "localises": "localizes",
911
+ "localising": "localizing",
912
+ "louvre": "louver",
913
+ "louvred": "louvered",
914
+ "louvres": "louvers",
915
+ "lustre": "luster",
916
+ "magnetise": "magnetize",
917
+ "magnetised": "magnetized",
918
+ "magnetises": "magnetizes",
919
+ "magnetising": "magnetizing",
920
+ "manoeuvrability": "maneuverability",
921
+ "manoeuvrable": "maneuverable",
922
+ "manoeuvre": "maneuver",
923
+ "manoeuvred": "maneuvered",
924
+ "manoeuvres": "maneuvers",
925
+ "manoeuvring": "maneuvering",
926
+ "manoeuvrings": "maneuverings",
927
+ "marginalisation": "marginalization",
928
+ "marginalise": "marginalize",
929
+ "marginalised": "marginalized",
930
+ "marginalises": "marginalizes",
931
+ "marginalising": "marginalizing",
932
+ "marshalled": "marshaled",
933
+ "marshalling": "marshaling",
934
+ "marvelled": "marveled",
935
+ "marvelling": "marveling",
936
+ "marvellous": "marvelous",
937
+ "marvellously": "marvelously",
938
+ "materialisation": "materialization",
939
+ "materialise": "materialize",
940
+ "materialised": "materialized",
941
+ "materialises": "materializes",
942
+ "materialising": "materializing",
943
+ "maximisation": "maximization",
944
+ "maximise": "maximize",
945
+ "maximised": "maximized",
946
+ "maximises": "maximizes",
947
+ "maximising": "maximizing",
948
+ "meagre": "meager",
949
+ "mechanisation": "mechanization",
950
+ "mechanise": "mechanize",
951
+ "mechanised": "mechanized",
952
+ "mechanises": "mechanizes",
953
+ "mechanising": "mechanizing",
954
+ "mediaeval": "medieval",
955
+ "memorialise": "memorialize",
956
+ "memorialised": "memorialized",
957
+ "memorialises": "memorializes",
958
+ "memorialising": "memorializing",
959
+ "memorise": "memorize",
960
+ "memorised": "memorized",
961
+ "memorises": "memorizes",
962
+ "memorising": "memorizing",
963
+ "mesmerise": "mesmerize",
964
+ "mesmerised": "mesmerized",
965
+ "mesmerises": "mesmerizes",
966
+ "mesmerising": "mesmerizing",
967
+ "metabolise": "metabolize",
968
+ "metabolised": "metabolized",
969
+ "metabolises": "metabolizes",
970
+ "metabolising": "metabolizing",
971
+ "metre": "meter",
972
+ "metres": "meters",
973
+ "mhm": "hmm",
974
+ "micrometre": "micrometer",
975
+ "micrometres": "micrometers",
976
+ "militarise": "militarize",
977
+ "militarised": "militarized",
978
+ "militarises": "militarizes",
979
+ "militarising": "militarizing",
980
+ "milligramme": "milligram",
981
+ "milligrammes": "milligrams",
982
+ "millilitre": "milliliter",
983
+ "millilitres": "milliliters",
984
+ "millimetre": "millimeter",
985
+ "millimetres": "millimeters",
986
+ "miniaturisation": "miniaturization",
987
+ "miniaturise": "miniaturize",
988
+ "miniaturised": "miniaturized",
989
+ "miniaturises": "miniaturizes",
990
+ "miniaturising": "miniaturizing",
991
+ "minibusses": "minibuses",
992
+ "minimise": "minimize",
993
+ "minimised": "minimized",
994
+ "minimises": "minimizes",
995
+ "minimising": "minimizing",
996
+ "misbehaviour": "misbehavior",
997
+ "misdemeanour": "misdemeanor",
998
+ "misdemeanours": "misdemeanors",
999
+ "misspelt": "misspelled",
1000
+ "mitre": "miter",
1001
+ "mitres": "miters",
1002
+ "mm": "hmm",
1003
+ "mmm": "hmm",
1004
+ "mobilisation": "mobilization",
1005
+ "mobilise": "mobilize",
1006
+ "mobilised": "mobilized",
1007
+ "mobilises": "mobilizes",
1008
+ "mobilising": "mobilizing",
1009
+ "modelled": "modeled",
1010
+ "modeller": "modeler",
1011
+ "modellers": "modelers",
1012
+ "modelling": "modeling",
1013
+ "modernise": "modernize",
1014
+ "modernised": "modernized",
1015
+ "modernises": "modernizes",
1016
+ "modernising": "modernizing",
1017
+ "moisturise": "moisturize",
1018
+ "moisturised": "moisturized",
1019
+ "moisturiser": "moisturizer",
1020
+ "moisturisers": "moisturizers",
1021
+ "moisturises": "moisturizes",
1022
+ "moisturising": "moisturizing",
1023
+ "monologue": "monolog",
1024
+ "monologues": "monologs",
1025
+ "monopolisation": "monopolization",
1026
+ "monopolise": "monopolize",
1027
+ "monopolised": "monopolized",
1028
+ "monopolises": "monopolizes",
1029
+ "monopolising": "monopolizing",
1030
+ "moralise": "moralize",
1031
+ "moralised": "moralized",
1032
+ "moralises": "moralizes",
1033
+ "moralising": "moralizing",
1034
+ "motorised": "motorized",
1035
+ "mould": "mold",
1036
+ "moulded": "molded",
1037
+ "moulder": "molder",
1038
+ "mouldered": "moldered",
1039
+ "mouldering": "moldering",
1040
+ "moulders": "molders",
1041
+ "mouldier": "moldier",
1042
+ "mouldiest": "moldiest",
1043
+ "moulding": "molding",
1044
+ "mouldings": "moldings",
1045
+ "moulds": "molds",
1046
+ "mouldy": "moldy",
1047
+ "moult": "molt",
1048
+ "moulted": "molted",
1049
+ "moulting": "molting",
1050
+ "moults": "molts",
1051
+ "moustache": "mustache",
1052
+ "moustached": "mustached",
1053
+ "moustaches": "mustaches",
1054
+ "moustachioed": "mustachioed",
1055
+ "multicoloured": "multicolored",
1056
+ "nationalisation": "nationalization",
1057
+ "nationalisations": "nationalizations",
1058
+ "nationalise": "nationalize",
1059
+ "nationalised": "nationalized",
1060
+ "nationalises": "nationalizes",
1061
+ "nationalising": "nationalizing",
1062
+ "naturalisation": "naturalization",
1063
+ "naturalise": "naturalize",
1064
+ "naturalised": "naturalized",
1065
+ "naturalises": "naturalizes",
1066
+ "naturalising": "naturalizing",
1067
+ "neighbour": "neighbor",
1068
+ "neighbourhood": "neighborhood",
1069
+ "neighbourhoods": "neighborhoods",
1070
+ "neighbouring": "neighboring",
1071
+ "neighbourliness": "neighborliness",
1072
+ "neighbourly": "neighborly",
1073
+ "neighbours": "neighbors",
1074
+ "neutralisation": "neutralization",
1075
+ "neutralise": "neutralize",
1076
+ "neutralised": "neutralized",
1077
+ "neutralises": "neutralizes",
1078
+ "neutralising": "neutralizing",
1079
+ "normalisation": "normalization",
1080
+ "normalise": "normalize",
1081
+ "normalised": "normalized",
1082
+ "normalises": "normalizes",
1083
+ "normalising": "normalizing",
1084
+ "odour": "odor",
1085
+ "odourless": "odorless",
1086
+ "odours": "odors",
1087
+ "oesophagus": "esophagus",
1088
+ "oesophaguses": "esophaguses",
1089
+ "oestrogen": "estrogen",
1090
+ "offence": "offense",
1091
+ "offences": "offenses",
1092
+ "omelette": "omelet",
1093
+ "omelettes": "omelets",
1094
+ "optimise": "optimize",
1095
+ "optimised": "optimized",
1096
+ "optimises": "optimizes",
1097
+ "optimising": "optimizing",
1098
+ "organisation": "organization",
1099
+ "organisational": "organizational",
1100
+ "organisations": "organizations",
1101
+ "organise": "organize",
1102
+ "organised": "organized",
1103
+ "organiser": "organizer",
1104
+ "organisers": "organizers",
1105
+ "organises": "organizes",
1106
+ "organising": "organizing",
1107
+ "orthopaedic": "orthopedic",
1108
+ "orthopaedics": "orthopedics",
1109
+ "ostracise": "ostracize",
1110
+ "ostracised": "ostracized",
1111
+ "ostracises": "ostracizes",
1112
+ "ostracising": "ostracizing",
1113
+ "outmanoeuvre": "outmaneuver",
1114
+ "outmanoeuvred": "outmaneuvered",
1115
+ "outmanoeuvres": "outmaneuvers",
1116
+ "outmanoeuvring": "outmaneuvering",
1117
+ "overemphasise": "overemphasize",
1118
+ "overemphasised": "overemphasized",
1119
+ "overemphasises": "overemphasizes",
1120
+ "overemphasising": "overemphasizing",
1121
+ "oxidisation": "oxidization",
1122
+ "oxidise": "oxidize",
1123
+ "oxidised": "oxidized",
1124
+ "oxidises": "oxidizes",
1125
+ "oxidising": "oxidizing",
1126
+ "paederast": "pederast",
1127
+ "paederasts": "pederasts",
1128
+ "paediatric": "pediatric",
1129
+ "paediatrician": "pediatrician",
1130
+ "paediatricians": "pediatricians",
1131
+ "paediatrics": "pediatrics",
1132
+ "paedophile": "pedophile",
1133
+ "paedophiles": "pedophiles",
1134
+ "paedophilia": "pedophilia",
1135
+ "palaeolithic": "paleolithic",
1136
+ "palaeontologist": "paleontologist",
1137
+ "palaeontologists": "paleontologists",
1138
+ "palaeontology": "paleontology",
1139
+ "panelled": "paneled",
1140
+ "panelling": "paneling",
1141
+ "panellist": "panelist",
1142
+ "panellists": "panelists",
1143
+ "paralyse": "paralyze",
1144
+ "paralysed": "paralyzed",
1145
+ "paralyses": "paralyzes",
1146
+ "paralysing": "paralyzing",
1147
+ "parcelled": "parceled",
1148
+ "parcelling": "parceling",
1149
+ "parlour": "parlor",
1150
+ "parlours": "parlors",
1151
+ "particularise": "particularize",
1152
+ "particularised": "particularized",
1153
+ "particularises": "particularizes",
1154
+ "particularising": "particularizing",
1155
+ "passivisation": "passivization",
1156
+ "passivise": "passivize",
1157
+ "passivised": "passivized",
1158
+ "passivises": "passivizes",
1159
+ "passivising": "passivizing",
1160
+ "pasteurisation": "pasteurization",
1161
+ "pasteurise": "pasteurize",
1162
+ "pasteurised": "pasteurized",
1163
+ "pasteurises": "pasteurizes",
1164
+ "pasteurising": "pasteurizing",
1165
+ "patronise": "patronize",
1166
+ "patronised": "patronized",
1167
+ "patronises": "patronizes",
1168
+ "patronising": "patronizing",
1169
+ "patronisingly": "patronizingly",
1170
+ "pedalled": "pedaled",
1171
+ "pedalling": "pedaling",
1172
+ "pedestrianisation": "pedestrianization",
1173
+ "pedestrianise": "pedestrianize",
1174
+ "pedestrianised": "pedestrianized",
1175
+ "pedestrianises": "pedestrianizes",
1176
+ "pedestrianising": "pedestrianizing",
1177
+ "penalise": "penalize",
1178
+ "penalised": "penalized",
1179
+ "penalises": "penalizes",
1180
+ "penalising": "penalizing",
1181
+ "pencilled": "penciled",
1182
+ "pencilling": "penciling",
1183
+ "personalise": "personalize",
1184
+ "personalised": "personalized",
1185
+ "personalises": "personalizes",
1186
+ "personalising": "personalizing",
1187
+ "pharmacopoeia": "pharmacopeia",
1188
+ "pharmacopoeias": "pharmacopeias",
1189
+ "philosophise": "philosophize",
1190
+ "philosophised": "philosophized",
1191
+ "philosophises": "philosophizes",
1192
+ "philosophising": "philosophizing",
1193
+ "philtre": "filter",
1194
+ "philtres": "filters",
1195
+ "phoney": "phony",
1196
+ "plagiarise": "plagiarize",
1197
+ "plagiarised": "plagiarized",
1198
+ "plagiarises": "plagiarizes",
1199
+ "plagiarising": "plagiarizing",
1200
+ "plough": "plow",
1201
+ "ploughed": "plowed",
1202
+ "ploughing": "plowing",
1203
+ "ploughman": "plowman",
1204
+ "ploughmen": "plowmen",
1205
+ "ploughs": "plows",
1206
+ "ploughshare": "plowshare",
1207
+ "ploughshares": "plowshares",
1208
+ "polarisation": "polarization",
1209
+ "polarise": "polarize",
1210
+ "polarised": "polarized",
1211
+ "polarises": "polarizes",
1212
+ "polarising": "polarizing",
1213
+ "politicisation": "politicization",
1214
+ "politicise": "politicize",
1215
+ "politicised": "politicized",
1216
+ "politicises": "politicizes",
1217
+ "politicising": "politicizing",
1218
+ "popularisation": "popularization",
1219
+ "popularise": "popularize",
1220
+ "popularised": "popularized",
1221
+ "popularises": "popularizes",
1222
+ "popularising": "popularizing",
1223
+ "pouffe": "pouf",
1224
+ "pouffes": "poufs",
1225
+ "practise": "practice",
1226
+ "practised": "practiced",
1227
+ "practises": "practices",
1228
+ "practising": "practicing",
1229
+ "praesidium": "presidium",
1230
+ "praesidiums": "presidiums",
1231
+ "pressurisation": "pressurization",
1232
+ "pressurise": "pressurize",
1233
+ "pressurised": "pressurized",
1234
+ "pressurises": "pressurizes",
1235
+ "pressurising": "pressurizing",
1236
+ "pretence": "pretense",
1237
+ "pretences": "pretenses",
1238
+ "primaeval": "primeval",
1239
+ "prioritisation": "prioritization",
1240
+ "prioritise": "prioritize",
1241
+ "prioritised": "prioritized",
1242
+ "prioritises": "prioritizes",
1243
+ "prioritising": "prioritizing",
1244
+ "privatisation": "privatization",
1245
+ "privatisations": "privatizations",
1246
+ "privatise": "privatize",
1247
+ "privatised": "privatized",
1248
+ "privatises": "privatizes",
1249
+ "privatising": "privatizing",
1250
+ "professionalisation": "professionalization",
1251
+ "professionalise": "professionalize",
1252
+ "professionalised": "professionalized",
1253
+ "professionalises": "professionalizes",
1254
+ "professionalising": "professionalizing",
1255
+ "programme": "program",
1256
+ "programmes": "programs",
1257
+ "prologue": "prolog",
1258
+ "prologues": "prologs",
1259
+ "propagandise": "propagandize",
1260
+ "propagandised": "propagandized",
1261
+ "propagandises": "propagandizes",
1262
+ "propagandising": "propagandizing",
1263
+ "proselytise": "proselytize",
1264
+ "proselytised": "proselytized",
1265
+ "proselytiser": "proselytizer",
1266
+ "proselytisers": "proselytizers",
1267
+ "proselytises": "proselytizes",
1268
+ "proselytising": "proselytizing",
1269
+ "psychoanalyse": "psychoanalyze",
1270
+ "psychoanalysed": "psychoanalyzed",
1271
+ "psychoanalyses": "psychoanalyzes",
1272
+ "psychoanalysing": "psychoanalyzing",
1273
+ "publicise": "publicize",
1274
+ "publicised": "publicized",
1275
+ "publicises": "publicizes",
1276
+ "publicising": "publicizing",
1277
+ "pulverisation": "pulverization",
1278
+ "pulverise": "pulverize",
1279
+ "pulverised": "pulverized",
1280
+ "pulverises": "pulverizes",
1281
+ "pulverising": "pulverizing",
1282
+ "pummelled": "pummel",
1283
+ "pummelling": "pummeled",
1284
+ "pyjama": "pajama",
1285
+ "pyjamas": "pajamas",
1286
+ "pzazz": "pizzazz",
1287
+ "quarrelled": "quarreled",
1288
+ "quarrelling": "quarreling",
1289
+ "radicalise": "radicalize",
1290
+ "radicalised": "radicalized",
1291
+ "radicalises": "radicalizes",
1292
+ "radicalising": "radicalizing",
1293
+ "rancour": "rancor",
1294
+ "randomise": "randomize",
1295
+ "randomised": "randomized",
1296
+ "randomises": "randomizes",
1297
+ "randomising": "randomizing",
1298
+ "rationalisation": "rationalization",
1299
+ "rationalisations": "rationalizations",
1300
+ "rationalise": "rationalize",
1301
+ "rationalised": "rationalized",
1302
+ "rationalises": "rationalizes",
1303
+ "rationalising": "rationalizing",
1304
+ "ravelled": "raveled",
1305
+ "ravelling": "raveling",
1306
+ "realisable": "realizable",
1307
+ "realisation": "realization",
1308
+ "realisations": "realizations",
1309
+ "realise": "realize",
1310
+ "realised": "realized",
1311
+ "realises": "realizes",
1312
+ "realising": "realizing",
1313
+ "recognisable": "recognizable",
1314
+ "recognisably": "recognizably",
1315
+ "recognisance": "recognizance",
1316
+ "recognise": "recognize",
1317
+ "recognised": "recognized",
1318
+ "recognises": "recognizes",
1319
+ "recognising": "recognizing",
1320
+ "reconnoitre": "reconnoiter",
1321
+ "reconnoitred": "reconnoitered",
1322
+ "reconnoitres": "reconnoiters",
1323
+ "reconnoitring": "reconnoitering",
1324
+ "refuelled": "refueled",
1325
+ "refuelling": "refueling",
1326
+ "regularisation": "regularization",
1327
+ "regularise": "regularize",
1328
+ "regularised": "regularized",
1329
+ "regularises": "regularizes",
1330
+ "regularising": "regularizing",
1331
+ "remodelled": "remodeled",
1332
+ "remodelling": "remodeling",
1333
+ "remould": "remold",
1334
+ "remoulded": "remolded",
1335
+ "remoulding": "remolding",
1336
+ "remoulds": "remolds",
1337
+ "reorganisation": "reorganization",
1338
+ "reorganisations": "reorganizations",
1339
+ "reorganise": "reorganize",
1340
+ "reorganised": "reorganized",
1341
+ "reorganises": "reorganizes",
1342
+ "reorganising": "reorganizing",
1343
+ "revelled": "reveled",
1344
+ "reveller": "reveler",
1345
+ "revellers": "revelers",
1346
+ "revelling": "reveling",
1347
+ "revitalise": "revitalize",
1348
+ "revitalised": "revitalized",
1349
+ "revitalises": "revitalizes",
1350
+ "revitalising": "revitalizing",
1351
+ "revolutionise": "revolutionize",
1352
+ "revolutionised": "revolutionized",
1353
+ "revolutionises": "revolutionizes",
1354
+ "revolutionising": "revolutionizing",
1355
+ "rhapsodise": "rhapsodize",
1356
+ "rhapsodised": "rhapsodized",
1357
+ "rhapsodises": "rhapsodizes",
1358
+ "rhapsodising": "rhapsodizing",
1359
+ "rigour": "rigor",
1360
+ "rigours": "rigors",
1361
+ "ritualised": "ritualized",
1362
+ "rivalled": "rivaled",
1363
+ "rivalling": "rivaling",
1364
+ "romanticise": "romanticize",
1365
+ "romanticised": "romanticized",
1366
+ "romanticises": "romanticizes",
1367
+ "romanticising": "romanticizing",
1368
+ "rumour": "rumor",
1369
+ "rumoured": "rumored",
1370
+ "rumours": "rumors",
1371
+ "sabre": "saber",
1372
+ "sabres": "sabers",
1373
+ "saltpetre": "saltpeter",
1374
+ "sanitise": "sanitize",
1375
+ "sanitised": "sanitized",
1376
+ "sanitises": "sanitizes",
1377
+ "sanitising": "sanitizing",
1378
+ "satirise": "satirize",
1379
+ "satirised": "satirized",
1380
+ "satirises": "satirizes",
1381
+ "satirising": "satirizing",
1382
+ "saviour": "savior",
1383
+ "saviours": "saviors",
1384
+ "savour": "savor",
1385
+ "savoured": "savored",
1386
+ "savouries": "savories",
1387
+ "savouring": "savoring",
1388
+ "savours": "savors",
1389
+ "savoury": "savory",
1390
+ "scandalise": "scandalize",
1391
+ "scandalised": "scandalized",
1392
+ "scandalises": "scandalizes",
1393
+ "scandalising": "scandalizing",
1394
+ "sceptic": "skeptic",
1395
+ "sceptical": "skeptical",
1396
+ "sceptically": "skeptically",
1397
+ "scepticism": "skepticism",
1398
+ "sceptics": "skeptics",
1399
+ "sceptre": "scepter",
1400
+ "sceptres": "scepters",
1401
+ "scrutinise": "scrutinize",
1402
+ "scrutinised": "scrutinized",
1403
+ "scrutinises": "scrutinizes",
1404
+ "scrutinising": "scrutinizing",
1405
+ "secularisation": "secularization",
1406
+ "secularise": "secularize",
1407
+ "secularised": "secularized",
1408
+ "secularises": "secularizes",
1409
+ "secularising": "secularizing",
1410
+ "sensationalise": "sensationalize",
1411
+ "sensationalised": "sensationalized",
1412
+ "sensationalises": "sensationalizes",
1413
+ "sensationalising": "sensationalizing",
1414
+ "sensitise": "sensitize",
1415
+ "sensitised": "sensitized",
1416
+ "sensitises": "sensitizes",
1417
+ "sensitising": "sensitizing",
1418
+ "sentimentalise": "sentimentalize",
1419
+ "sentimentalised": "sentimentalized",
1420
+ "sentimentalises": "sentimentalizes",
1421
+ "sentimentalising": "sentimentalizing",
1422
+ "sepulchre": "sepulcher",
1423
+ "sepulchres": "sepulchers",
1424
+ "serialisation": "serialization",
1425
+ "serialisations": "serializations",
1426
+ "serialise": "serialize",
1427
+ "serialised": "serialized",
1428
+ "serialises": "serializes",
1429
+ "serialising": "serializing",
1430
+ "sermonise": "sermonize",
1431
+ "sermonised": "sermonized",
1432
+ "sermonises": "sermonizes",
1433
+ "sermonising": "sermonizing",
1434
+ "sheikh": "sheik",
1435
+ "shovelled": "shoveled",
1436
+ "shovelling": "shoveling",
1437
+ "shrivelled": "shriveled",
1438
+ "shrivelling": "shriveling",
1439
+ "signalise": "signalize",
1440
+ "signalised": "signalized",
1441
+ "signalises": "signalizes",
1442
+ "signalising": "signalizing",
1443
+ "signalled": "signaled",
1444
+ "signalling": "signaling",
1445
+ "smoulder": "smolder",
1446
+ "smouldered": "smoldered",
1447
+ "smouldering": "smoldering",
1448
+ "smoulders": "smolders",
1449
+ "snivelled": "sniveled",
1450
+ "snivelling": "sniveling",
1451
+ "snorkelled": "snorkeled",
1452
+ "snorkelling": "snorkeling",
1453
+ "snowplough": "snowplow",
1454
+ "snowploughs": "snowplows",
1455
+ "socialisation": "socialization",
1456
+ "socialise": "socialize",
1457
+ "socialised": "socialized",
1458
+ "socialises": "socializes",
1459
+ "socialising": "socializing",
1460
+ "sodomise": "sodomize",
1461
+ "sodomised": "sodomized",
1462
+ "sodomises": "sodomizes",
1463
+ "sodomising": "sodomizing",
1464
+ "solemnise": "solemnize",
1465
+ "solemnised": "solemnized",
1466
+ "solemnises": "solemnizes",
1467
+ "solemnising": "solemnizing",
1468
+ "sombre": "somber",
1469
+ "specialisation": "specialization",
1470
+ "specialisations": "specializations",
1471
+ "specialise": "specialize",
1472
+ "specialised": "specialized",
1473
+ "specialises": "specializes",
1474
+ "specialising": "specializing",
1475
+ "spectre": "specter",
1476
+ "spectres": "specters",
1477
+ "spiralled": "spiraled",
1478
+ "spiralling": "spiraling",
1479
+ "splendour": "splendor",
1480
+ "splendours": "splendors",
1481
+ "squirrelled": "squirreled",
1482
+ "squirrelling": "squirreling",
1483
+ "stabilisation": "stabilization",
1484
+ "stabilise": "stabilize",
1485
+ "stabilised": "stabilized",
1486
+ "stabiliser": "stabilizer",
1487
+ "stabilisers": "stabilizers",
1488
+ "stabilises": "stabilizes",
1489
+ "stabilising": "stabilizing",
1490
+ "standardisation": "standardization",
1491
+ "standardise": "standardize",
1492
+ "standardised": "standardized",
1493
+ "standardises": "standardizes",
1494
+ "standardising": "standardizing",
1495
+ "stencilled": "stenciled",
1496
+ "stencilling": "stenciling",
1497
+ "sterilisation": "sterilization",
1498
+ "sterilisations": "sterilizations",
1499
+ "sterilise": "sterilize",
1500
+ "sterilised": "sterilized",
1501
+ "steriliser": "sterilizer",
1502
+ "sterilisers": "sterilizers",
1503
+ "sterilises": "sterilizes",
1504
+ "sterilising": "sterilizing",
1505
+ "stigmatisation": "stigmatization",
1506
+ "stigmatise": "stigmatize",
1507
+ "stigmatised": "stigmatized",
1508
+ "stigmatises": "stigmatizes",
1509
+ "stigmatising": "stigmatizing",
1510
+ "storey": "story",
1511
+ "storeys": "stories",
1512
+ "subsidisation": "subsidization",
1513
+ "subsidise": "subsidize",
1514
+ "subsidised": "subsidized",
1515
+ "subsidiser": "subsidizer",
1516
+ "subsidisers": "subsidizers",
1517
+ "subsidises": "subsidizes",
1518
+ "subsidising": "subsidizing",
1519
+ "succour": "succor",
1520
+ "succoured": "succored",
1521
+ "succouring": "succoring",
1522
+ "succours": "succors",
1523
+ "sulphate": "sulfate",
1524
+ "sulphates": "sulfates",
1525
+ "sulphide": "sulfide",
1526
+ "sulphides": "sulfides",
1527
+ "sulphur": "sulfur",
1528
+ "sulphurous": "sulfurous",
1529
+ "summarise": "summarize",
1530
+ "summarised": "summarized",
1531
+ "summarises": "summarizes",
1532
+ "summarising": "summarizing",
1533
+ "swivelled": "swiveled",
1534
+ "swivelling": "swiveling",
1535
+ "symbolise": "symbolize",
1536
+ "symbolised": "symbolized",
1537
+ "symbolises": "symbolizes",
1538
+ "symbolising": "symbolizing",
1539
+ "sympathise": "sympathize",
1540
+ "sympathised": "sympathized",
1541
+ "sympathiser": "sympathizer",
1542
+ "sympathisers": "sympathizers",
1543
+ "sympathises": "sympathizes",
1544
+ "sympathising": "sympathizing",
1545
+ "synchronisation": "synchronization",
1546
+ "synchronise": "synchronize",
1547
+ "synchronised": "synchronized",
1548
+ "synchronises": "synchronizes",
1549
+ "synchronising": "synchronizing",
1550
+ "synthesise": "synthesize",
1551
+ "synthesised": "synthesized",
1552
+ "synthesiser": "synthesizer",
1553
+ "synthesisers": "synthesizers",
1554
+ "synthesises": "synthesizes",
1555
+ "synthesising": "synthesizing",
1556
+ "syphon": "siphon",
1557
+ "syphoned": "siphoned",
1558
+ "syphoning": "siphoning",
1559
+ "syphons": "siphons",
1560
+ "systematisation": "systematization",
1561
+ "systematise": "systematize",
1562
+ "systematised": "systematized",
1563
+ "systematises": "systematizes",
1564
+ "systematising": "systematizing",
1565
+ "tantalise": "tantalize",
1566
+ "tantalised": "tantalized",
1567
+ "tantalises": "tantalizes",
1568
+ "tantalising": "tantalizing",
1569
+ "tantalisingly": "tantalizingly",
1570
+ "tasselled": "tasseled",
1571
+ "technicolour": "technicolor",
1572
+ "temporise": "temporize",
1573
+ "temporised": "temporized",
1574
+ "temporises": "temporizes",
1575
+ "temporising": "temporizing",
1576
+ "tenderise": "tenderize",
1577
+ "tenderised": "tenderized",
1578
+ "tenderises": "tenderizes",
1579
+ "tenderising": "tenderizing",
1580
+ "terrorise": "terrorize",
1581
+ "terrorised": "terrorized",
1582
+ "terrorises": "terrorizes",
1583
+ "terrorising": "terrorizing",
1584
+ "theatre": "theater",
1585
+ "theatregoer": "theatergoer",
1586
+ "theatregoers": "theatergoers",
1587
+ "theatres": "theaters",
1588
+ "theorise": "theorize",
1589
+ "theorised": "theorized",
1590
+ "theorises": "theorizes",
1591
+ "theorising": "theorizing",
1592
+ "tonne": "ton",
1593
+ "tonnes": "tons",
1594
+ "towelled": "toweled",
1595
+ "towelling": "toweling",
1596
+ "toxaemia": "toxemia",
1597
+ "tranquillise": "tranquilize",
1598
+ "tranquillised": "tranquilized",
1599
+ "tranquilliser": "tranquilizer",
1600
+ "tranquillisers": "tranquilizers",
1601
+ "tranquillises": "tranquilizes",
1602
+ "tranquillising": "tranquilizing",
1603
+ "tranquillity": "tranquility",
1604
+ "tranquillize": "tranquilize",
1605
+ "tranquillized": "tranquilized",
1606
+ "tranquillizer": "tranquilizer",
1607
+ "tranquillizers": "tranquilizers",
1608
+ "tranquillizes": "tranquilizes",
1609
+ "tranquillizing": "tranquilizing",
1610
+ "tranquilly": "tranquility",
1611
+ "transistorised": "transistorized",
1612
+ "traumatise": "traumatize",
1613
+ "traumatised": "traumatized",
1614
+ "traumatises": "traumatizes",
1615
+ "traumatising": "traumatizing",
1616
+ "travelled": "traveled",
1617
+ "traveller": "traveler",
1618
+ "travellers": "travelers",
1619
+ "travelling": "traveling",
1620
+ "travelog": "travelogue",
1621
+ "travelogs": "travelogues",
1622
+ "trialled": "trialed",
1623
+ "trialling": "trialing",
1624
+ "tricolour": "tricolor",
1625
+ "tricolours": "tricolors",
1626
+ "trivialise": "trivialize",
1627
+ "trivialised": "trivialized",
1628
+ "trivialises": "trivializes",
1629
+ "trivialising": "trivializing",
1630
+ "tumour": "tumor",
1631
+ "tumours": "tumors",
1632
+ "tunnelled": "tunneled",
1633
+ "tunnelling": "tunneling",
1634
+ "tyrannise": "tyrannize",
1635
+ "tyrannised": "tyrannized",
1636
+ "tyrannises": "tyrannizes",
1637
+ "tyrannising": "tyrannizing",
1638
+ "tyre": "tire",
1639
+ "tyres": "tires",
1640
+ "unauthorised": "unauthorized",
1641
+ "uncivilised": "uncivilized",
1642
+ "underutilised": "underutilized",
1643
+ "unequalled": "unequaled",
1644
+ "unfavourable": "unfavorable",
1645
+ "unfavourably": "unfavorably",
1646
+ "unionisation": "unionization",
1647
+ "unionise": "unionize",
1648
+ "unionised": "unionized",
1649
+ "unionises": "unionizes",
1650
+ "unionising": "unionizing",
1651
+ "unorganised": "unorganized",
1652
+ "unravelled": "unraveled",
1653
+ "unravelling": "unraveling",
1654
+ "unrecognisable": "unrecognizable",
1655
+ "unrecognised": "unrecognized",
1656
+ "unrivalled": "unrivaled",
1657
+ "unsavoury": "unsavory",
1658
+ "untrammelled": "untrammeled",
1659
+ "urbanisation": "urbanization",
1660
+ "urbanise": "urbanize",
1661
+ "urbanised": "urbanized",
1662
+ "urbanises": "urbanizes",
1663
+ "urbanising": "urbanizing",
1664
+ "utilisable": "utilizable",
1665
+ "utilisation": "utilization",
1666
+ "utilise": "utilize",
1667
+ "utilised": "utilized",
1668
+ "utilises": "utilizes",
1669
+ "utilising": "utilizing",
1670
+ "valour": "valor",
1671
+ "vandalise": "vandalize",
1672
+ "vandalised": "vandalized",
1673
+ "vandalises": "vandalizes",
1674
+ "vandalising": "vandalizing",
1675
+ "vaporisation": "vaporization",
1676
+ "vaporise": "vaporize",
1677
+ "vaporised": "vaporized",
1678
+ "vaporises": "vaporizes",
1679
+ "vaporising": "vaporizing",
1680
+ "vapour": "vapor",
1681
+ "vapours": "vapors",
1682
+ "verbalise": "verbalize",
1683
+ "verbalised": "verbalized",
1684
+ "verbalises": "verbalizes",
1685
+ "verbalising": "verbalizing",
1686
+ "victimisation": "victimization",
1687
+ "victimise": "victimize",
1688
+ "victimised": "victimized",
1689
+ "victimises": "victimizes",
1690
+ "victimising": "victimizing",
1691
+ "videodisc": "videodisk",
1692
+ "videodiscs": "videodisks",
1693
+ "vigour": "vigor",
1694
+ "visualisation": "visualization",
1695
+ "visualisations": "visualizations",
1696
+ "visualise": "visualize",
1697
+ "visualised": "visualized",
1698
+ "visualises": "visualizes",
1699
+ "visualising": "visualizing",
1700
+ "vocalisation": "vocalization",
1701
+ "vocalisations": "vocalizations",
1702
+ "vocalise": "vocalize",
1703
+ "vocalised": "vocalized",
1704
+ "vocalises": "vocalizes",
1705
+ "vocalising": "vocalizing",
1706
+ "vulcanised": "vulcanized",
1707
+ "vulgarisation": "vulgarization",
1708
+ "vulgarise": "vulgarize",
1709
+ "vulgarised": "vulgarized",
1710
+ "vulgarises": "vulgarizes",
1711
+ "vulgarising": "vulgarizing",
1712
+ "waggon": "wagon",
1713
+ "waggons": "wagons",
1714
+ "watercolour": "watercolor",
1715
+ "watercolours": "watercolors",
1716
+ "weaselled": "weaseled",
1717
+ "weaselling": "weaseling",
1718
+ "westernisation": "westernization",
1719
+ "westernise": "westernize",
1720
+ "westernised": "westernized",
1721
+ "westernises": "westernizes",
1722
+ "westernising": "westernizing",
1723
+ "womanise": "womanize",
1724
+ "womanised": "womanized",
1725
+ "womaniser": "womanizer",
1726
+ "womanisers": "womanizers",
1727
+ "womanises": "womanizes",
1728
+ "womanising": "womanizing",
1729
+ "woollen": "woolen",
1730
+ "woollens": "woolens",
1731
+ "woollies": "woolies",
1732
+ "woolly": "wooly",
1733
+ "worshipped": "worshiped",
1734
+ "worshipper": "worshiper",
1735
+ "worshipping": "worshiping",
1736
+ "yodelled": "yodeled",
1737
+ "yodelling": "yodeling",
1738
+ "yoghourt": "yogurt",
1739
+ "yoghourts": "yogurts",
1740
+ "yoghurt": "yogurt",
1741
+ "yoghurts": "yogurts"
1742
+ }
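The mapping that closes above is the tail of a British-to-American spelling table of the kind the Whisper English text normalizer applies before scoring transcriptions (e.g. for WER), so that spelling variants are not counted as errors. Below is a minimal sketch of how such a table could be used; the file name `english_spelling.json` and the helper `normalize_spelling` are illustrative assumptions, not files or functions defined in this repository.

```python
import json

# Assumed file name for illustration; loads the British -> American spelling table.
with open("english_spelling.json", encoding="utf-8") as f:
    spelling_map = json.load(f)  # e.g. {"recognises": "recognizes", ...}

def normalize_spelling(text: str) -> str:
    """Replace British spellings with their American counterparts, word by word."""
    return " ".join(spelling_map.get(word, word) for word in text.lower().split())

# Hypothesis and reference would both pass through the same normalization before WER.
print(normalize_spelling("The traveller summarised the theatre rumours"))
# -> "the traveler summarized the theater rumors"
```

Libraries such as openai-whisper and transformers ship an `EnglishTextNormalizer` built around the same kind of table, which additionally handles numbers, contractions, and punctuation.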
optimizer.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d2305abe00d9b15ee3b341a3d05debb5a7965cdbc50f30a393804c681938a7b
3
+ size 84100933
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "chunk_length": 30,
3
+ "feature_extractor_type": "WhisperFeatureExtractor",
4
+ "feature_size": 80,
5
+ "hop_length": 160,
6
+ "n_fft": 400,
7
+ "n_samples": 480000,
8
+ "nb_max_frames": 3000,
9
+ "padding_side": "right",
10
+ "padding_value": 0.0,
11
+ "processor_class": "WhisperProcessor",
12
+ "return_attention_mask": false,
13
+ "sampling_rate": 16000
14
+ }
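The preprocessor config above pins the standard Whisper front end: 16 kHz audio, 30-second windows (480,000 samples), an STFT with n_fft = 400 and hop_length = 160, and 80 log-Mel bins, giving 3,000 frames per window. A small sketch of how these values fit together, assuming the base checkpoint `openai/whisper-large-v2` (the checkpoint name is an assumption for illustration):

```python
import numpy as np
from transformers import WhisperFeatureExtractor

# Checkpoint name is an assumption; it reproduces the values in preprocessor_config.json.
feature_extractor = WhisperFeatureExtractor.from_pretrained("openai/whisper-large-v2")

# Consistency of the config values:
#   n_samples     = chunk_length * sampling_rate = 30 * 16000   = 480000
#   nb_max_frames = n_samples / hop_length       = 480000 / 160 = 3000
audio = np.zeros(16000, dtype=np.float32)  # 1 s of silence; padded to 30 s internally
features = feature_extractor(audio, sampling_rate=16000, return_tensors="np")
print(features.input_features.shape)  # (1, 80, 3000): 80 mel bins x 3000 frames
```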
rng_state.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6972ecad22e33fd22f34d584ffafaf9c9fe671a585062263f15c206ea3b584f8
3
+ size 14575
runs/Sep05_13-35-12_0d4d652f3487/events.out.tfevents.1693920912.0d4d652f3487.216790.0 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:572b5fb95f3716eccd52db17939de71788727646c6ccd7cdd7998c1f404fb2da
3
+ size 102779
scheduler.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b867c8d285af98da4317da2f6849a3c86f61b1c1ba92ff7b064ccd9e9cc02416
3
+ size 627
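optimizer.pt, scheduler.pt, rng_state.pth and the tfevents file are the state the Hugging Face `Trainer` writes next to a checkpoint so a run can be resumed later (optimizer moments, learning-rate schedule position, RNG state, and TensorBoard logs). They appear here as Git LFS pointers, so the binaries must be pulled before inspection. A quick way to peek at them once downloaded (file paths are assumptions):

```python
import torch

# Paths are assumptions; these files are Git LFS pointers in the diff above.
scheduler_state = torch.load("scheduler.pt", map_location="cpu")
print(scheduler_state)         # step counter and last learning rates of the schedule

optimizer_state = torch.load("optimizer.pt", map_location="cpu")
print(optimizer_state.keys())  # typically dict_keys(['state', 'param_groups'])
```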
special_tokens_map.json ADDED
@@ -0,0 +1,133 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|endoftext|>",
4
+ "<|startoftranscript|>",
5
+ "<|en|>",
6
+ "<|zh|>",
7
+ "<|de|>",
8
+ "<|es|>",
9
+ "<|ru|>",
10
+ "<|ko|>",
11
+ "<|fr|>",
12
+ "<|ja|>",
13
+ "<|pt|>",
14
+ "<|tr|>",
15
+ "<|pl|>",
16
+ "<|ca|>",
17
+ "<|nl|>",
18
+ "<|ar|>",
19
+ "<|sv|>",
20
+ "<|it|>",
21
+ "<|id|>",
22
+ "<|hi|>",
23
+ "<|fi|>",
24
+ "<|vi|>",
25
+ "<|he|>",
26
+ "<|uk|>",
27
+ "<|el|>",
28
+ "<|ms|>",
29
+ "<|cs|>",
30
+ "<|ro|>",
31
+ "<|da|>",
32
+ "<|hu|>",
33
+ "<|ta|>",
34
+ "<|no|>",
35
+ "<|th|>",
36
+ "<|ur|>",
37
+ "<|hr|>",
38
+ "<|bg|>",
39
+ "<|lt|>",
40
+ "<|la|>",
41
+ "<|mi|>",
42
+ "<|ml|>",
43
+ "<|cy|>",
44
+ "<|sk|>",
45
+ "<|te|>",
46
+ "<|fa|>",
47
+ "<|lv|>",
48
+ "<|bn|>",
49
+ "<|sr|>",
50
+ "<|az|>",
51
+ "<|sl|>",
52
+ "<|kn|>",
53
+ "<|et|>",
54
+ "<|mk|>",
55
+ "<|br|>",
56
+ "<|eu|>",
57
+ "<|is|>",
58
+ "<|hy|>",
59
+ "<|ne|>",
60
+ "<|mn|>",
61
+ "<|bs|>",
62
+ "<|kk|>",
63
+ "<|sq|>",
64
+ "<|sw|>",
65
+ "<|gl|>",
66
+ "<|mr|>",
67
+ "<|pa|>",
68
+ "<|si|>",
69
+ "<|km|>",
70
+ "<|sn|>",
71
+ "<|yo|>",
72
+ "<|so|>",
73
+ "<|af|>",
74
+ "<|oc|>",
75
+ "<|ka|>",
76
+ "<|be|>",
77
+ "<|tg|>",
78
+ "<|sd|>",
79
+ "<|gu|>",
80
+ "<|am|>",
81
+ "<|yi|>",
82
+ "<|lo|>",
83
+ "<|uz|>",
84
+ "<|fo|>",
85
+ "<|ht|>",
86
+ "<|ps|>",
87
+ "<|tk|>",
88
+ "<|nn|>",
89
+ "<|mt|>",
90
+ "<|sa|>",
91
+ "<|lb|>",
92
+ "<|my|>",
93
+ "<|bo|>",
94
+ "<|tl|>",
95
+ "<|mg|>",
96
+ "<|as|>",
97
+ "<|tt|>",
98
+ "<|haw|>",
99
+ "<|ln|>",
100
+ "<|ha|>",
101
+ "<|ba|>",
102
+ "<|jw|>",
103
+ "<|su|>",
104
+ "<|translate|>",
105
+ "<|transcribe|>",
106
+ "<|startoflm|>",
107
+ "<|startofprev|>",
108
+ "<|nocaptions|>",
109
+ "<|notimestamps|>"
110
+ ],
111
+ "bos_token": {
112
+ "content": "<|endoftext|>",
113
+ "lstrip": false,
114
+ "normalized": true,
115
+ "rstrip": false,
116
+ "single_word": false
117
+ },
118
+ "eos_token": {
119
+ "content": "<|endoftext|>",
120
+ "lstrip": false,
121
+ "normalized": true,
122
+ "rstrip": false,
123
+ "single_word": false
124
+ },
125
+ "pad_token": "<|endoftext|>",
126
+ "unk_token": {
127
+ "content": "<|endoftext|>",
128
+ "lstrip": false,
129
+ "normalized": true,
130
+ "rstrip": false,
131
+ "single_word": false
132
+ }
133
+ }
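special_tokens_map.json lists the Whisper language tokens plus the control tokens (`<|startoftranscript|>`, `<|transcribe|>`, `<|translate|>`, `<|notimestamps|>`, …), with `<|endoftext|>` doubling as bos/eos/pad/unk. At inference time these are normally injected through the processor's decoder prompt rather than written by hand; a minimal sketch, assuming the `openai/whisper-large-v2` processor:

```python
from transformers import WhisperProcessor

# Checkpoint name is an assumption; any processor carrying the same
# special_tokens_map.json behaves the same way here.
processor = WhisperProcessor.from_pretrained("openai/whisper-large-v2")

# Builds the decoder prompt <|startoftranscript|><|en|><|transcribe|><|notimestamps|>
forced_ids = processor.get_decoder_prompt_ids(language="en", task="transcribe")
print(forced_ids)  # [(1, <|en|> id), (2, <|transcribe|> id), (3, <|notimestamps|> id)]
```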
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "bos_token": {
5
+ "__type": "AddedToken",
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": true,
9
+ "rstrip": false,
10
+ "single_word": false
11
+ },
12
+ "clean_up_tokenization_spaces": true,
13
+ "eos_token": {
14
+ "__type": "AddedToken",
15
+ "content": "<|endoftext|>",
16
+ "lstrip": false,
17
+ "normalized": true,
18
+ "rstrip": false,
19
+ "single_word": false
20
+ },
21
+ "errors": "replace",
22
+ "model_max_length": 1024,
23
+ "pad_token": null,
24
+ "processor_class": "WhisperProcessor",
25
+ "return_attention_mask": false,
26
+ "tokenizer_class": "WhisperTokenizer",
27
+ "unk_token": {
28
+ "__type": "AddedToken",
29
+ "content": "<|endoftext|>",
30
+ "lstrip": false,
31
+ "normalized": true,
32
+ "rstrip": false,
33
+ "single_word": false
34
+ }
35
+ }
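tokenizer_config.json pins the byte-level BPE `WhisperTokenizer` with `<|endoftext|>` as bos/eos/unk and `model_max_length` 1024; `pad_token` is null here and is picked up from special_tokens_map.json above at load time. A short round-trip sketch, again assuming the `openai/whisper-large-v2` checkpoint:

```python
from transformers import WhisperTokenizer

# Checkpoint name is an assumption for illustration.
tokenizer = WhisperTokenizer.from_pretrained(
    "openai/whisper-large-v2", language="en", task="transcribe"
)

ids = tokenizer("hello world").input_ids
print(ids)  # prompt tokens, the BPE tokens of the text, then <|endoftext|>
print(tokenizer.decode(ids, skip_special_tokens=True))  # original text, specials stripped
```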
trainer_state.json ADDED
@@ -0,0 +1,3659 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 3.10345,
5
+ "eval_steps": 400,
6
+ "global_step": 14000,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0,
13
+ "learning_rate": 5e-05,
14
+ "loss": 2.4874,
15
+ "step": 25
16
+ },
17
+ {
18
+ "epoch": 0.0,
19
+ "learning_rate": 0.0001,
20
+ "loss": 1.5625,
21
+ "step": 50
22
+ },
23
+ {
24
+ "epoch": 0.0,
25
+ "learning_rate": 0.00015,
26
+ "loss": 0.8084,
27
+ "step": 75
28
+ },
29
+ {
30
+ "epoch": 0.01,
31
+ "learning_rate": 0.0002,
32
+ "loss": 0.181,
33
+ "step": 100
34
+ },
35
+ {
36
+ "epoch": 0.01,
37
+ "learning_rate": 0.00025,
38
+ "loss": 0.1378,
39
+ "step": 125
40
+ },
41
+ {
42
+ "epoch": 0.01,
43
+ "learning_rate": 0.0003,
44
+ "loss": 0.1267,
45
+ "step": 150
46
+ },
47
+ {
48
+ "epoch": 0.01,
49
+ "learning_rate": 0.00035,
50
+ "loss": 0.1118,
51
+ "step": 175
52
+ },
53
+ {
54
+ "epoch": 0.01,
55
+ "learning_rate": 0.0004,
56
+ "loss": 0.1189,
57
+ "step": 200
58
+ },
59
+ {
60
+ "epoch": 0.01,
61
+ "learning_rate": 0.00045000000000000004,
62
+ "loss": 0.11,
63
+ "step": 225
64
+ },
65
+ {
66
+ "epoch": 0.01,
67
+ "learning_rate": 0.0005,
68
+ "loss": 0.1131,
69
+ "step": 250
70
+ },
71
+ {
72
+ "epoch": 0.01,
73
+ "learning_rate": 0.00055,
74
+ "loss": 0.1196,
75
+ "step": 275
76
+ },
77
+ {
78
+ "epoch": 0.01,
79
+ "learning_rate": 0.0006,
80
+ "loss": 0.0998,
81
+ "step": 300
82
+ },
83
+ {
84
+ "epoch": 0.02,
85
+ "learning_rate": 0.0006500000000000001,
86
+ "loss": 0.1087,
87
+ "step": 325
88
+ },
89
+ {
90
+ "epoch": 0.02,
91
+ "learning_rate": 0.0007,
92
+ "loss": 0.1098,
93
+ "step": 350
94
+ },
95
+ {
96
+ "epoch": 0.02,
97
+ "learning_rate": 0.00075,
98
+ "loss": 0.1138,
99
+ "step": 375
100
+ },
101
+ {
102
+ "epoch": 0.02,
103
+ "learning_rate": 0.0008,
104
+ "loss": 0.1212,
105
+ "step": 400
106
+ },
107
+ {
108
+ "epoch": 0.02,
109
+ "eval_loss": 0.5537986159324646,
110
+ "eval_runtime": 216.4127,
111
+ "eval_samples_per_second": 4.621,
112
+ "eval_steps_per_second": 0.291,
113
+ "step": 400
114
+ },
115
+ {
116
+ "epoch": 0.02,
117
+ "learning_rate": 0.00085,
118
+ "loss": 0.1228,
119
+ "step": 425
120
+ },
121
+ {
122
+ "epoch": 0.02,
123
+ "learning_rate": 0.0009000000000000001,
124
+ "loss": 0.1177,
125
+ "step": 450
126
+ },
127
+ {
128
+ "epoch": 0.02,
129
+ "learning_rate": 0.00095,
130
+ "loss": 0.1196,
131
+ "step": 475
132
+ },
133
+ {
134
+ "epoch": 0.03,
135
+ "learning_rate": 0.001,
136
+ "loss": 0.1214,
137
+ "step": 500
138
+ },
139
+ {
140
+ "epoch": 0.03,
141
+ "learning_rate": 0.0009987179487179487,
142
+ "loss": 0.1415,
143
+ "step": 525
144
+ },
145
+ {
146
+ "epoch": 0.03,
147
+ "learning_rate": 0.0009974358974358974,
148
+ "loss": 0.1491,
149
+ "step": 550
150
+ },
151
+ {
152
+ "epoch": 0.03,
153
+ "learning_rate": 0.0009961538461538463,
154
+ "loss": 0.1467,
155
+ "step": 575
156
+ },
157
+ {
158
+ "epoch": 0.03,
159
+ "learning_rate": 0.000994871794871795,
160
+ "loss": 0.1455,
161
+ "step": 600
162
+ },
163
+ {
164
+ "epoch": 0.03,
165
+ "learning_rate": 0.0009935897435897436,
166
+ "loss": 0.1393,
167
+ "step": 625
168
+ },
169
+ {
170
+ "epoch": 0.03,
171
+ "learning_rate": 0.0009923076923076923,
172
+ "loss": 0.1355,
173
+ "step": 650
174
+ },
175
+ {
176
+ "epoch": 0.03,
177
+ "learning_rate": 0.0009910256410256412,
178
+ "loss": 0.1299,
179
+ "step": 675
180
+ },
181
+ {
182
+ "epoch": 0.04,
183
+ "learning_rate": 0.0009897435897435899,
184
+ "loss": 0.124,
185
+ "step": 700
186
+ },
187
+ {
188
+ "epoch": 0.04,
189
+ "learning_rate": 0.0009884615384615385,
190
+ "loss": 0.1209,
191
+ "step": 725
192
+ },
193
+ {
194
+ "epoch": 0.04,
195
+ "learning_rate": 0.0009871794871794872,
196
+ "loss": 0.1247,
197
+ "step": 750
198
+ },
199
+ {
200
+ "epoch": 0.04,
201
+ "learning_rate": 0.0009858974358974359,
202
+ "loss": 0.1238,
203
+ "step": 775
204
+ },
205
+ {
206
+ "epoch": 0.04,
207
+ "learning_rate": 0.0009846153846153848,
208
+ "loss": 0.1211,
209
+ "step": 800
210
+ },
211
+ {
212
+ "epoch": 0.04,
213
+ "eval_loss": 1.2810015678405762,
214
+ "eval_runtime": 217.0046,
215
+ "eval_samples_per_second": 4.608,
216
+ "eval_steps_per_second": 0.29,
217
+ "step": 800
218
+ },
219
+ {
220
+ "epoch": 0.04,
221
+ "learning_rate": 0.0009833333333333332,
222
+ "loss": 0.1226,
223
+ "step": 825
224
+ },
225
+ {
226
+ "epoch": 0.04,
227
+ "learning_rate": 0.0009820512820512821,
228
+ "loss": 0.1267,
229
+ "step": 850
230
+ },
231
+ {
232
+ "epoch": 0.04,
233
+ "learning_rate": 0.000980820512820513,
234
+ "loss": 0.1039,
235
+ "step": 875
236
+ },
237
+ {
238
+ "epoch": 0.04,
239
+ "learning_rate": 0.0009795384615384616,
240
+ "loss": 0.1142,
241
+ "step": 900
242
+ },
243
+ {
244
+ "epoch": 0.05,
245
+ "learning_rate": 0.0009782564102564103,
246
+ "loss": 0.1062,
247
+ "step": 925
248
+ },
249
+ {
250
+ "epoch": 0.05,
251
+ "learning_rate": 0.000976974358974359,
252
+ "loss": 0.115,
253
+ "step": 950
254
+ },
255
+ {
256
+ "epoch": 0.05,
257
+ "learning_rate": 0.0009756923076923077,
258
+ "loss": 0.1131,
259
+ "step": 975
260
+ },
261
+ {
262
+ "epoch": 0.05,
263
+ "learning_rate": 0.0009744102564102564,
264
+ "loss": 0.1173,
265
+ "step": 1000
266
+ },
267
+ {
268
+ "epoch": 0.05,
269
+ "learning_rate": 0.0009731282051282051,
270
+ "loss": 0.1114,
271
+ "step": 1025
272
+ },
273
+ {
274
+ "epoch": 0.05,
275
+ "learning_rate": 0.0009718461538461539,
276
+ "loss": 0.1123,
277
+ "step": 1050
278
+ },
279
+ {
280
+ "epoch": 0.05,
281
+ "learning_rate": 0.0009705641025641025,
282
+ "loss": 0.1258,
283
+ "step": 1075
284
+ },
285
+ {
286
+ "epoch": 0.06,
287
+ "learning_rate": 0.0009692820512820512,
288
+ "loss": 0.1168,
289
+ "step": 1100
290
+ },
291
+ {
292
+ "epoch": 0.06,
293
+ "learning_rate": 0.000968,
294
+ "loss": 0.1193,
295
+ "step": 1125
296
+ },
297
+ {
298
+ "epoch": 0.06,
299
+ "learning_rate": 0.0009667179487179487,
300
+ "loss": 0.1216,
301
+ "step": 1150
302
+ },
303
+ {
304
+ "epoch": 0.06,
305
+ "learning_rate": 0.0009654358974358975,
306
+ "loss": 0.1257,
307
+ "step": 1175
308
+ },
309
+ {
310
+ "epoch": 0.06,
311
+ "learning_rate": 0.0009641538461538461,
312
+ "loss": 0.1219,
313
+ "step": 1200
314
+ },
315
+ {
316
+ "epoch": 0.06,
317
+ "eval_loss": 1.4006367921829224,
318
+ "eval_runtime": 216.1116,
319
+ "eval_samples_per_second": 4.627,
320
+ "eval_steps_per_second": 0.292,
321
+ "step": 1200
322
+ },
323
+ {
324
+ "epoch": 0.06,
325
+ "learning_rate": 0.0009628717948717949,
326
+ "loss": 0.1096,
327
+ "step": 1225
328
+ },
329
+ {
330
+ "epoch": 0.06,
331
+ "learning_rate": 0.0009615897435897436,
332
+ "loss": 0.1234,
333
+ "step": 1250
334
+ },
335
+ {
336
+ "epoch": 0.06,
337
+ "learning_rate": 0.0009603076923076923,
338
+ "loss": 0.1406,
339
+ "step": 1275
340
+ },
341
+ {
342
+ "epoch": 0.07,
343
+ "learning_rate": 0.0009590256410256411,
344
+ "loss": 0.1189,
345
+ "step": 1300
346
+ },
347
+ {
348
+ "epoch": 0.07,
349
+ "learning_rate": 0.0009577435897435897,
350
+ "loss": 0.1211,
351
+ "step": 1325
352
+ },
353
+ {
354
+ "epoch": 0.07,
355
+ "learning_rate": 0.0009564615384615385,
356
+ "loss": 0.1153,
357
+ "step": 1350
358
+ },
359
+ {
360
+ "epoch": 0.07,
361
+ "learning_rate": 0.0009551794871794872,
362
+ "loss": 0.1161,
363
+ "step": 1375
364
+ },
365
+ {
366
+ "epoch": 0.07,
367
+ "learning_rate": 0.000953897435897436,
368
+ "loss": 0.1179,
369
+ "step": 1400
370
+ },
371
+ {
372
+ "epoch": 0.07,
373
+ "learning_rate": 0.0009526153846153847,
374
+ "loss": 0.1102,
375
+ "step": 1425
376
+ },
377
+ {
378
+ "epoch": 0.07,
379
+ "learning_rate": 0.0009513333333333334,
380
+ "loss": 0.123,
381
+ "step": 1450
382
+ },
383
+ {
384
+ "epoch": 0.07,
385
+ "learning_rate": 0.0009500512820512821,
386
+ "loss": 0.1386,
387
+ "step": 1475
388
+ },
389
+ {
390
+ "epoch": 0.07,
391
+ "learning_rate": 0.0009487692307692308,
392
+ "loss": 0.0922,
393
+ "step": 1500
394
+ },
395
+ {
396
+ "epoch": 0.08,
397
+ "learning_rate": 0.0009474871794871796,
398
+ "loss": 0.1243,
399
+ "step": 1525
400
+ },
401
+ {
402
+ "epoch": 0.08,
403
+ "learning_rate": 0.0009462051282051282,
404
+ "loss": 0.0956,
405
+ "step": 1550
406
+ },
407
+ {
408
+ "epoch": 0.08,
409
+ "learning_rate": 0.000944923076923077,
410
+ "loss": 0.1027,
411
+ "step": 1575
412
+ },
413
+ {
414
+ "epoch": 0.08,
415
+ "learning_rate": 0.0009436410256410256,
416
+ "loss": 0.1142,
417
+ "step": 1600
418
+ },
419
+ {
420
+ "epoch": 0.08,
421
+ "eval_loss": 0.10953158885240555,
422
+ "eval_runtime": 216.1306,
423
+ "eval_samples_per_second": 4.627,
424
+ "eval_steps_per_second": 0.291,
425
+ "step": 1600
426
+ },
427
+ {
428
+ "epoch": 0.08,
429
+ "learning_rate": 0.0009423589743589744,
430
+ "loss": 0.117,
431
+ "step": 1625
432
+ },
433
+ {
434
+ "epoch": 0.08,
435
+ "learning_rate": 0.0009410769230769231,
436
+ "loss": 0.1146,
437
+ "step": 1650
438
+ },
439
+ {
440
+ "epoch": 0.08,
441
+ "learning_rate": 0.0009397948717948717,
442
+ "loss": 0.0974,
443
+ "step": 1675
444
+ },
445
+ {
446
+ "epoch": 0.09,
447
+ "learning_rate": 0.0009385128205128205,
448
+ "loss": 0.1041,
449
+ "step": 1700
450
+ },
451
+ {
452
+ "epoch": 0.09,
453
+ "learning_rate": 0.0009372307692307692,
454
+ "loss": 0.1048,
455
+ "step": 1725
456
+ },
457
+ {
458
+ "epoch": 0.09,
459
+ "learning_rate": 0.000935948717948718,
460
+ "loss": 0.0934,
461
+ "step": 1750
462
+ },
463
+ {
464
+ "epoch": 0.09,
465
+ "learning_rate": 0.0009346666666666667,
466
+ "loss": 0.0982,
467
+ "step": 1775
468
+ },
469
+ {
470
+ "epoch": 0.09,
471
+ "learning_rate": 0.0009333846153846154,
472
+ "loss": 0.1014,
473
+ "step": 1800
474
+ },
475
+ {
476
+ "epoch": 0.09,
477
+ "learning_rate": 0.0009321025641025641,
478
+ "loss": 0.1304,
479
+ "step": 1825
480
+ },
481
+ {
482
+ "epoch": 0.09,
483
+ "learning_rate": 0.0009308205128205128,
484
+ "loss": 0.0995,
485
+ "step": 1850
486
+ },
487
+ {
488
+ "epoch": 0.09,
489
+ "learning_rate": 0.0009295384615384616,
490
+ "loss": 0.0912,
491
+ "step": 1875
492
+ },
493
+ {
494
+ "epoch": 0.1,
495
+ "learning_rate": 0.0009282564102564102,
496
+ "loss": 0.0963,
497
+ "step": 1900
498
+ },
499
+ {
500
+ "epoch": 0.1,
501
+ "learning_rate": 0.000926974358974359,
502
+ "loss": 0.1051,
503
+ "step": 1925
504
+ },
505
+ {
506
+ "epoch": 0.1,
507
+ "learning_rate": 0.0009256923076923077,
508
+ "loss": 0.0987,
509
+ "step": 1950
510
+ },
511
+ {
512
+ "epoch": 0.1,
513
+ "learning_rate": 0.0009244102564102565,
514
+ "loss": 0.0974,
515
+ "step": 1975
516
+ },
517
+ {
518
+ "epoch": 0.1,
519
+ "learning_rate": 0.0009231282051282052,
520
+ "loss": 0.1087,
521
+ "step": 2000
522
+ },
523
+ {
524
+ "epoch": 0.1,
525
+ "eval_loss": 0.10780761390924454,
526
+ "eval_runtime": 216.1847,
527
+ "eval_samples_per_second": 4.626,
528
+ "eval_steps_per_second": 0.291,
529
+ "step": 2000
530
+ },
531
+ {
532
+ "epoch": 0.1,
533
+ "learning_rate": 0.0009218461538461538,
534
+ "loss": 0.1035,
535
+ "step": 2025
536
+ },
537
+ {
538
+ "epoch": 0.1,
539
+ "learning_rate": 0.0009205641025641026,
540
+ "loss": 0.1094,
541
+ "step": 2050
542
+ },
543
+ {
544
+ "epoch": 0.1,
545
+ "learning_rate": 0.0009192820512820513,
546
+ "loss": 0.0851,
547
+ "step": 2075
548
+ },
549
+ {
550
+ "epoch": 0.1,
551
+ "learning_rate": 0.0009180000000000001,
552
+ "loss": 0.0741,
553
+ "step": 2100
554
+ },
555
+ {
556
+ "epoch": 0.11,
557
+ "learning_rate": 0.0009167179487179488,
558
+ "loss": 0.0881,
559
+ "step": 2125
560
+ },
561
+ {
562
+ "epoch": 0.11,
563
+ "learning_rate": 0.0009154358974358975,
564
+ "loss": 0.0794,
565
+ "step": 2150
566
+ },
567
+ {
568
+ "epoch": 0.11,
569
+ "learning_rate": 0.0009141538461538462,
570
+ "loss": 0.1199,
571
+ "step": 2175
572
+ },
573
+ {
574
+ "epoch": 0.11,
575
+ "learning_rate": 0.0009128717948717948,
576
+ "loss": 0.1307,
577
+ "step": 2200
578
+ },
579
+ {
580
+ "epoch": 0.11,
581
+ "learning_rate": 0.0009115897435897436,
582
+ "loss": 0.108,
583
+ "step": 2225
584
+ },
585
+ {
586
+ "epoch": 0.11,
587
+ "learning_rate": 0.0009103076923076923,
588
+ "loss": 0.078,
589
+ "step": 2250
590
+ },
591
+ {
592
+ "epoch": 0.11,
593
+ "learning_rate": 0.000909025641025641,
594
+ "loss": 0.0853,
595
+ "step": 2275
596
+ },
597
+ {
598
+ "epoch": 0.12,
599
+ "learning_rate": 0.0009077435897435897,
600
+ "loss": 0.0852,
601
+ "step": 2300
602
+ },
603
+ {
604
+ "epoch": 0.12,
605
+ "learning_rate": 0.0009064615384615385,
606
+ "loss": 0.0856,
607
+ "step": 2325
608
+ },
609
+ {
610
+ "epoch": 0.12,
611
+ "learning_rate": 0.0009051794871794872,
612
+ "loss": 0.0663,
613
+ "step": 2350
614
+ },
615
+ {
616
+ "epoch": 0.12,
617
+ "learning_rate": 0.0009038974358974358,
618
+ "loss": 0.0818,
619
+ "step": 2375
620
+ },
621
+ {
622
+ "epoch": 0.12,
623
+ "learning_rate": 0.0009026153846153846,
624
+ "loss": 0.0724,
625
+ "step": 2400
626
+ },
627
+ {
628
+ "epoch": 0.12,
629
+ "eval_loss": 0.11282841861248016,
630
+ "eval_runtime": 215.7164,
631
+ "eval_samples_per_second": 4.636,
632
+ "eval_steps_per_second": 0.292,
633
+ "step": 2400
634
+ },
635
+ {
636
+ "epoch": 0.12,
637
+ "learning_rate": 0.0009013333333333333,
638
+ "loss": 0.0824,
639
+ "step": 2425
640
+ },
641
+ {
642
+ "epoch": 0.12,
643
+ "learning_rate": 0.0009000512820512821,
644
+ "loss": 0.091,
645
+ "step": 2450
646
+ },
647
+ {
648
+ "epoch": 0.12,
649
+ "learning_rate": 0.0008987692307692308,
650
+ "loss": 0.0697,
651
+ "step": 2475
652
+ },
653
+ {
654
+ "epoch": 0.12,
655
+ "learning_rate": 0.0008974871794871795,
656
+ "loss": 0.076,
657
+ "step": 2500
658
+ },
659
+ {
660
+ "epoch": 0.13,
661
+ "learning_rate": 0.0008962051282051282,
662
+ "loss": 0.0769,
663
+ "step": 2525
664
+ },
665
+ {
666
+ "epoch": 0.13,
667
+ "learning_rate": 0.0008949230769230769,
668
+ "loss": 0.0724,
669
+ "step": 2550
670
+ },
671
+ {
672
+ "epoch": 0.13,
673
+ "learning_rate": 0.0008936410256410257,
674
+ "loss": 0.068,
675
+ "step": 2575
676
+ },
677
+ {
678
+ "epoch": 0.13,
679
+ "learning_rate": 0.0008923589743589744,
680
+ "loss": 0.0632,
681
+ "step": 2600
682
+ },
683
+ {
684
+ "epoch": 0.13,
685
+ "learning_rate": 0.0008910769230769231,
686
+ "loss": 0.0825,
687
+ "step": 2625
688
+ },
689
+ {
690
+ "epoch": 0.13,
691
+ "learning_rate": 0.0008897948717948718,
692
+ "loss": 0.0831,
693
+ "step": 2650
694
+ },
695
+ {
696
+ "epoch": 0.13,
697
+ "learning_rate": 0.0008885128205128206,
698
+ "loss": 0.0689,
699
+ "step": 2675
700
+ },
701
+ {
702
+ "epoch": 0.14,
703
+ "learning_rate": 0.0008872307692307693,
704
+ "loss": 0.0741,
705
+ "step": 2700
706
+ },
707
+ {
708
+ "epoch": 0.14,
709
+ "learning_rate": 0.000885948717948718,
710
+ "loss": 0.0798,
711
+ "step": 2725
712
+ },
713
+ {
714
+ "epoch": 0.14,
715
+ "learning_rate": 0.0008846666666666667,
716
+ "loss": 0.0817,
717
+ "step": 2750
718
+ },
719
+ {
720
+ "epoch": 0.14,
721
+ "learning_rate": 0.0008833846153846154,
722
+ "loss": 0.081,
723
+ "step": 2775
724
+ },
725
+ {
726
+ "epoch": 0.14,
727
+ "learning_rate": 0.0008821025641025642,
728
+ "loss": 0.0867,
729
+ "step": 2800
730
+ },
731
+ {
732
+ "epoch": 0.14,
733
+ "eval_loss": 0.10396511852741241,
734
+ "eval_runtime": 216.3083,
735
+ "eval_samples_per_second": 4.623,
736
+ "eval_steps_per_second": 0.291,
737
+ "step": 2800
738
+ },
739
+ {
740
+ "epoch": 0.14,
741
+ "learning_rate": 0.0008808205128205128,
742
+ "loss": 0.091,
743
+ "step": 2825
744
+ },
745
+ {
746
+ "epoch": 0.14,
747
+ "learning_rate": 0.0008795384615384616,
748
+ "loss": 0.0801,
749
+ "step": 2850
750
+ },
751
+ {
752
+ "epoch": 0.14,
753
+ "learning_rate": 0.0008782564102564102,
754
+ "loss": 0.1067,
755
+ "step": 2875
756
+ },
757
+ {
758
+ "epoch": 0.14,
759
+ "learning_rate": 0.0008769743589743589,
760
+ "loss": 0.0848,
761
+ "step": 2900
762
+ },
763
+ {
764
+ "epoch": 0.15,
765
+ "learning_rate": 0.0008756923076923077,
766
+ "loss": 0.0846,
767
+ "step": 2925
768
+ },
769
+ {
770
+ "epoch": 0.15,
771
+ "learning_rate": 0.0008744102564102564,
772
+ "loss": 0.076,
773
+ "step": 2950
774
+ },
775
+ {
776
+ "epoch": 0.15,
777
+ "learning_rate": 0.0008731282051282051,
778
+ "loss": 0.0729,
779
+ "step": 2975
780
+ },
781
+ {
782
+ "epoch": 0.15,
783
+ "learning_rate": 0.0008718461538461538,
784
+ "loss": 0.0994,
785
+ "step": 3000
786
+ },
787
+ {
788
+ "epoch": 0.15,
789
+ "learning_rate": 0.0008705641025641026,
790
+ "loss": 0.0901,
791
+ "step": 3025
792
+ },
793
+ {
794
+ "epoch": 0.15,
795
+ "learning_rate": 0.0008692820512820513,
796
+ "loss": 0.0735,
797
+ "step": 3050
798
+ },
799
+ {
800
+ "epoch": 0.15,
801
+ "learning_rate": 0.0008680000000000001,
802
+ "loss": 0.0796,
803
+ "step": 3075
804
+ },
805
+ {
806
+ "epoch": 0.15,
807
+ "learning_rate": 0.0008667179487179487,
808
+ "loss": 0.0888,
809
+ "step": 3100
810
+ },
811
+ {
812
+ "epoch": 0.16,
813
+ "learning_rate": 0.0008654358974358974,
814
+ "loss": 0.09,
815
+ "step": 3125
816
+ },
817
+ {
818
+ "epoch": 0.16,
819
+ "learning_rate": 0.0008641538461538462,
820
+ "loss": 0.0698,
821
+ "step": 3150
822
+ },
823
+ {
824
+ "epoch": 0.16,
825
+ "learning_rate": 0.0008628717948717949,
826
+ "loss": 0.074,
827
+ "step": 3175
828
+ },
829
+ {
830
+ "epoch": 0.16,
831
+ "learning_rate": 0.0008615897435897437,
832
+ "loss": 0.0644,
833
+ "step": 3200
834
+ },
835
+ {
836
+ "epoch": 0.16,
837
+ "eval_loss": 0.09922421723604202,
838
+ "eval_runtime": 218.0471,
839
+ "eval_samples_per_second": 4.586,
840
+ "eval_steps_per_second": 0.289,
841
+ "step": 3200
842
+ },
843
+ {
844
+ "epoch": 0.16,
845
+ "learning_rate": 0.0008603076923076923,
846
+ "loss": 0.0788,
847
+ "step": 3225
848
+ },
849
+ {
850
+ "epoch": 0.16,
851
+ "learning_rate": 0.0008590256410256411,
852
+ "loss": 0.0797,
853
+ "step": 3250
854
+ },
855
+ {
856
+ "epoch": 0.16,
857
+ "learning_rate": 0.0008577948717948718,
858
+ "loss": 0.11,
859
+ "step": 3275
860
+ },
861
+ {
862
+ "epoch": 0.17,
863
+ "learning_rate": 0.0008565128205128206,
864
+ "loss": 0.1003,
865
+ "step": 3300
866
+ },
867
+ {
868
+ "epoch": 0.17,
869
+ "learning_rate": 0.0008552307692307693,
870
+ "loss": 0.091,
871
+ "step": 3325
872
+ },
873
+ {
874
+ "epoch": 0.17,
875
+ "learning_rate": 0.0008539487179487179,
876
+ "loss": 0.1001,
877
+ "step": 3350
878
+ },
879
+ {
880
+ "epoch": 0.17,
881
+ "learning_rate": 0.0008526666666666667,
882
+ "loss": 0.087,
883
+ "step": 3375
884
+ },
885
+ {
886
+ "epoch": 0.17,
887
+ "learning_rate": 0.0008513846153846154,
888
+ "loss": 0.0857,
889
+ "step": 3400
890
+ },
891
+ {
892
+ "epoch": 0.17,
893
+ "learning_rate": 0.0008501025641025642,
894
+ "loss": 0.1035,
895
+ "step": 3425
896
+ },
897
+ {
898
+ "epoch": 0.17,
899
+ "learning_rate": 0.0008488205128205129,
900
+ "loss": 0.0913,
901
+ "step": 3450
902
+ },
903
+ {
904
+ "epoch": 0.17,
905
+ "learning_rate": 0.0008475384615384616,
906
+ "loss": 0.1002,
907
+ "step": 3475
908
+ },
909
+ {
910
+ "epoch": 0.17,
911
+ "learning_rate": 0.0008462564102564103,
912
+ "loss": 0.1144,
913
+ "step": 3500
914
+ },
915
+ {
916
+ "epoch": 0.18,
917
+ "learning_rate": 0.000844974358974359,
918
+ "loss": 0.1348,
919
+ "step": 3525
920
+ },
921
+ {
922
+ "epoch": 0.18,
923
+ "learning_rate": 0.0008436923076923078,
924
+ "loss": 0.0958,
925
+ "step": 3550
926
+ },
927
+ {
928
+ "epoch": 0.18,
929
+ "learning_rate": 0.0008424102564102565,
930
+ "loss": 0.116,
931
+ "step": 3575
932
+ },
933
+ {
934
+ "epoch": 0.18,
935
+ "learning_rate": 0.0008411282051282051,
936
+ "loss": 0.0895,
937
+ "step": 3600
938
+ },
939
+ {
940
+ "epoch": 0.18,
941
+ "eval_loss": 0.1009831354022026,
942
+ "eval_runtime": 216.8096,
943
+ "eval_samples_per_second": 4.612,
944
+ "eval_steps_per_second": 0.291,
945
+ "step": 3600
946
+ },
947
+ {
948
+ "epoch": 0.18,
949
+ "learning_rate": 0.0008398461538461538,
950
+ "loss": 0.0824,
951
+ "step": 3625
952
+ },
953
+ {
954
+ "epoch": 0.18,
955
+ "learning_rate": 0.0008385641025641026,
956
+ "loss": 0.1105,
957
+ "step": 3650
958
+ },
959
+ {
960
+ "epoch": 0.18,
961
+ "learning_rate": 0.0008372820512820513,
962
+ "loss": 0.1328,
963
+ "step": 3675
964
+ },
965
+ {
966
+ "epoch": 0.18,
967
+ "learning_rate": 0.0008359999999999999,
968
+ "loss": 0.1774,
969
+ "step": 3700
970
+ },
971
+ {
972
+ "epoch": 0.19,
973
+ "learning_rate": 0.0008347179487179487,
974
+ "loss": 0.1158,
975
+ "step": 3725
976
+ },
977
+ {
978
+ "epoch": 0.19,
979
+ "learning_rate": 0.0008334358974358974,
980
+ "loss": 0.1067,
981
+ "step": 3750
982
+ },
983
+ {
984
+ "epoch": 0.19,
985
+ "learning_rate": 0.0008321538461538462,
986
+ "loss": 0.0855,
987
+ "step": 3775
988
+ },
989
+ {
990
+ "epoch": 0.19,
991
+ "learning_rate": 0.0008308717948717949,
992
+ "loss": 0.0917,
993
+ "step": 3800
994
+ },
995
+ {
996
+ "epoch": 0.19,
997
+ "learning_rate": 0.0008295897435897436,
998
+ "loss": 0.0945,
999
+ "step": 3825
1000
+ },
1001
+ {
1002
+ "epoch": 0.19,
1003
+ "learning_rate": 0.0008283076923076923,
1004
+ "loss": 0.0863,
1005
+ "step": 3850
1006
+ },
1007
+ {
1008
+ "epoch": 0.19,
1009
+ "learning_rate": 0.000827025641025641,
1010
+ "loss": 0.1196,
1011
+ "step": 3875
1012
+ },
1013
+ {
1014
+ "epoch": 0.2,
1015
+ "learning_rate": 0.0008257435897435898,
1016
+ "loss": 0.0989,
1017
+ "step": 3900
1018
+ },
1019
+ {
1020
+ "epoch": 0.2,
1021
+ "learning_rate": 0.0008244615384615385,
1022
+ "loss": 0.1076,
1023
+ "step": 3925
1024
+ },
1025
+ {
1026
+ "epoch": 0.2,
1027
+ "learning_rate": 0.0008231794871794872,
1028
+ "loss": 0.1138,
1029
+ "step": 3950
1030
+ },
1031
+ {
1032
+ "epoch": 0.2,
1033
+ "learning_rate": 0.0008218974358974359,
1034
+ "loss": 0.1353,
1035
+ "step": 3975
1036
+ },
1037
+ {
1038
+ "epoch": 1.0,
1039
+ "learning_rate": 0.0008206153846153847,
1040
+ "loss": 0.236,
1041
+ "step": 4000
1042
+ },
1043
+ {
1044
+ "epoch": 1.0,
1045
+ "eval_loss": 0.9967941641807556,
1046
+ "eval_runtime": 215.9827,
1047
+ "eval_samples_per_second": 4.63,
1048
+ "eval_steps_per_second": 0.292,
1049
+ "step": 4000
1050
+ },
1051
+ {
1052
+ "epoch": 1.0,
1053
+ "learning_rate": 0.0008193846153846154,
1054
+ "loss": 0.382,
1055
+ "step": 4025
1056
+ },
1057
+ {
1058
+ "epoch": 1.0,
1059
+ "learning_rate": 0.0008181025641025642,
1060
+ "loss": 0.3524,
1061
+ "step": 4050
1062
+ },
1063
+ {
1064
+ "epoch": 1.0,
1065
+ "learning_rate": 0.0008168205128205128,
1066
+ "loss": 0.1807,
1067
+ "step": 4075
1068
+ },
1069
+ {
1070
+ "epoch": 1.01,
1071
+ "learning_rate": 0.0008155384615384615,
1072
+ "loss": 0.1568,
1073
+ "step": 4100
1074
+ },
1075
+ {
1076
+ "epoch": 1.01,
1077
+ "learning_rate": 0.0008142564102564103,
1078
+ "loss": 0.1533,
1079
+ "step": 4125
1080
+ },
1081
+ {
1082
+ "epoch": 1.01,
1083
+ "learning_rate": 0.000812974358974359,
1084
+ "loss": 0.1324,
1085
+ "step": 4150
1086
+ },
1087
+ {
1088
+ "epoch": 1.01,
1089
+ "learning_rate": 0.0008116923076923078,
1090
+ "loss": 0.1379,
1091
+ "step": 4175
1092
+ },
1093
+ {
1094
+ "epoch": 1.01,
1095
+ "learning_rate": 0.0008104102564102564,
1096
+ "loss": 0.1332,
1097
+ "step": 4200
1098
+ },
1099
+ {
1100
+ "epoch": 1.01,
1101
+ "learning_rate": 0.0008091282051282052,
1102
+ "loss": 0.1431,
1103
+ "step": 4225
1104
+ },
1105
+ {
1106
+ "epoch": 1.01,
1107
+ "learning_rate": 0.0008078461538461539,
1108
+ "loss": 0.1466,
1109
+ "step": 4250
1110
+ },
1111
+ {
1112
+ "epoch": 1.01,
1113
+ "learning_rate": 0.0008065641025641026,
1114
+ "loss": 0.1188,
1115
+ "step": 4275
1116
+ },
1117
+ {
1118
+ "epoch": 1.02,
1119
+ "learning_rate": 0.0008052820512820514,
1120
+ "loss": 0.1295,
1121
+ "step": 4300
1122
+ },
1123
+ {
1124
+ "epoch": 1.02,
1125
+ "learning_rate": 0.000804,
1126
+ "loss": 0.1378,
1127
+ "step": 4325
1128
+ },
1129
+ {
1130
+ "epoch": 1.02,
1131
+ "learning_rate": 0.0008027179487179488,
1132
+ "loss": 0.1197,
1133
+ "step": 4350
1134
+ },
1135
+ {
1136
+ "epoch": 1.02,
1137
+ "learning_rate": 0.0008014358974358974,
1138
+ "loss": 0.1231,
1139
+ "step": 4375
1140
+ },
1141
+ {
1142
+ "epoch": 1.02,
1143
+ "learning_rate": 0.0008001538461538462,
1144
+ "loss": 0.1268,
1145
+ "step": 4400
1146
+ },
1147
+ {
1148
+ "epoch": 1.02,
1149
+ "eval_loss": 1.367616891860962,
1150
+ "eval_runtime": 215.6124,
1151
+ "eval_samples_per_second": 4.638,
1152
+ "eval_steps_per_second": 0.292,
1153
+ "step": 4400
1154
+ },
1155
+ {
1156
+ "epoch": 1.02,
1157
+ "learning_rate": 0.0007988717948717948,
1158
+ "loss": 0.1283,
1159
+ "step": 4425
1160
+ },
1161
+ {
1162
+ "epoch": 1.02,
1163
+ "learning_rate": 0.0007975897435897435,
1164
+ "loss": 0.112,
1165
+ "step": 4450
1166
+ },
1167
+ {
1168
+ "epoch": 1.02,
1169
+ "learning_rate": 0.0007963076923076923,
1170
+ "loss": 0.1102,
1171
+ "step": 4475
1172
+ },
1173
+ {
1174
+ "epoch": 1.03,
1175
+ "learning_rate": 0.000795025641025641,
1176
+ "loss": 0.1225,
1177
+ "step": 4500
1178
+ },
1179
+ {
1180
+ "epoch": 1.03,
1181
+ "learning_rate": 0.0007937435897435898,
1182
+ "loss": 0.1155,
1183
+ "step": 4525
1184
+ },
1185
+ {
1186
+ "epoch": 1.03,
1187
+ "learning_rate": 0.0007924615384615384,
1188
+ "loss": 0.1176,
1189
+ "step": 4550
1190
+ },
1191
+ {
1192
+ "epoch": 1.03,
1193
+ "learning_rate": 0.0007911794871794872,
1194
+ "loss": 0.1049,
1195
+ "step": 4575
1196
+ },
1197
+ {
1198
+ "epoch": 1.03,
1199
+ "learning_rate": 0.0007898974358974359,
1200
+ "loss": 0.1204,
1201
+ "step": 4600
1202
+ },
1203
+ {
1204
+ "epoch": 1.03,
1205
+ "learning_rate": 0.0007886153846153847,
1206
+ "loss": 0.1112,
1207
+ "step": 4625
1208
+ },
1209
+ {
1210
+ "epoch": 1.03,
1211
+ "learning_rate": 0.0007873333333333334,
1212
+ "loss": 0.1064,
1213
+ "step": 4650
1214
+ },
1215
+ {
1216
+ "epoch": 1.03,
1217
+ "learning_rate": 0.000786051282051282,
1218
+ "loss": 0.1108,
1219
+ "step": 4675
1220
+ },
1221
+ {
1222
+ "epoch": 1.04,
1223
+ "learning_rate": 0.0007847692307692308,
1224
+ "loss": 0.1067,
1225
+ "step": 4700
1226
+ },
1227
+ {
1228
+ "epoch": 1.04,
1229
+ "learning_rate": 0.0007834871794871795,
1230
+ "loss": 0.0994,
1231
+ "step": 4725
1232
+ },
1233
+ {
1234
+ "epoch": 1.04,
1235
+ "learning_rate": 0.0007822051282051283,
1236
+ "loss": 0.105,
1237
+ "step": 4750
1238
+ },
1239
+ {
1240
+ "epoch": 1.04,
1241
+ "learning_rate": 0.000780923076923077,
1242
+ "loss": 0.0964,
1243
+ "step": 4775
1244
+ },
1245
+ {
1246
+ "epoch": 1.04,
1247
+ "learning_rate": 0.0007796410256410257,
1248
+ "loss": 0.1118,
1249
+ "step": 4800
1250
+ },
1251
+ {
1252
+ "epoch": 1.04,
1253
+ "eval_loss": 1.3230507373809814,
1254
+ "eval_runtime": 217.4474,
1255
+ "eval_samples_per_second": 4.599,
1256
+ "eval_steps_per_second": 0.29,
1257
+ "step": 4800
1258
+ },
1259
+ {
1260
+ "epoch": 1.04,
1261
+ "learning_rate": 0.0007783589743589744,
1262
+ "loss": 0.1062,
1263
+ "step": 4825
1264
+ },
1265
+ {
1266
+ "epoch": 1.04,
1267
+ "learning_rate": 0.0007770769230769231,
1268
+ "loss": 0.0876,
1269
+ "step": 4850
1270
+ },
1271
+ {
1272
+ "epoch": 1.04,
1273
+ "learning_rate": 0.0007757948717948719,
1274
+ "loss": 0.0958,
1275
+ "step": 4875
1276
+ },
1277
+ {
1278
+ "epoch": 1.05,
1279
+ "learning_rate": 0.0007745128205128205,
1280
+ "loss": 0.0915,
1281
+ "step": 4900
1282
+ },
1283
+ {
1284
+ "epoch": 1.05,
1285
+ "learning_rate": 0.0007732307692307693,
1286
+ "loss": 0.0935,
1287
+ "step": 4925
1288
+ },
1289
+ {
1290
+ "epoch": 1.05,
1291
+ "learning_rate": 0.000771948717948718,
1292
+ "loss": 0.0947,
1293
+ "step": 4950
1294
+ },
1295
+ {
1296
+ "epoch": 1.05,
1297
+ "learning_rate": 0.0007706666666666668,
1298
+ "loss": 0.0985,
1299
+ "step": 4975
1300
+ },
1301
+ {
1302
+ "epoch": 1.05,
1303
+ "learning_rate": 0.0007693846153846154,
1304
+ "loss": 0.0913,
1305
+ "step": 5000
1306
+ },
1307
+ {
1308
+ "epoch": 1.05,
1309
+ "learning_rate": 0.000768102564102564,
1310
+ "loss": 0.0933,
1311
+ "step": 5025
1312
+ },
1313
+ {
1314
+ "epoch": 1.05,
1315
+ "learning_rate": 0.0007668205128205128,
1316
+ "loss": 0.0953,
1317
+ "step": 5050
1318
+ },
1319
+ {
1320
+ "epoch": 1.05,
1321
+ "learning_rate": 0.0007655384615384615,
1322
+ "loss": 0.0958,
1323
+ "step": 5075
1324
+ },
1325
+ {
1326
+ "epoch": 1.06,
1327
+ "learning_rate": 0.0007642564102564103,
1328
+ "loss": 0.0938,
1329
+ "step": 5100
1330
+ },
1331
+ {
1332
+ "epoch": 1.06,
1333
+ "learning_rate": 0.000762974358974359,
1334
+ "loss": 0.0886,
1335
+ "step": 5125
1336
+ },
1337
+ {
1338
+ "epoch": 1.06,
1339
+ "learning_rate": 0.0007616923076923077,
1340
+ "loss": 0.0897,
1341
+ "step": 5150
1342
+ },
1343
+ {
1344
+ "epoch": 1.06,
1345
+ "learning_rate": 0.0007604102564102564,
1346
+ "loss": 0.0925,
1347
+ "step": 5175
1348
+ },
1349
+ {
1350
+ "epoch": 1.06,
1351
+ "learning_rate": 0.0007591282051282051,
1352
+ "loss": 0.0868,
1353
+ "step": 5200
1354
+ },
1355
+ {
1356
+ "epoch": 1.06,
1357
+ "eval_loss": 1.243537187576294,
1358
+ "eval_runtime": 216.704,
1359
+ "eval_samples_per_second": 4.615,
1360
+ "eval_steps_per_second": 0.291,
1361
+ "step": 5200
1362
+ },
1363
+ {
1364
+ "epoch": 1.06,
1365
+ "learning_rate": 0.0007578461538461539,
1366
+ "loss": 0.0964,
1367
+ "step": 5225
1368
+ },
1369
+ {
1370
+ "epoch": 1.06,
1371
+ "learning_rate": 0.0007565641025641025,
1372
+ "loss": 0.094,
1373
+ "step": 5250
1374
+ },
1375
+ {
1376
+ "epoch": 1.06,
1377
+ "learning_rate": 0.0007552820512820513,
1378
+ "loss": 0.0932,
1379
+ "step": 5275
1380
+ },
1381
+ {
1382
+ "epoch": 1.07,
1383
+ "learning_rate": 0.000754,
1384
+ "loss": 0.0908,
1385
+ "step": 5300
1386
+ },
1387
+ {
1388
+ "epoch": 1.07,
1389
+ "learning_rate": 0.0007527179487179488,
1390
+ "loss": 0.0903,
1391
+ "step": 5325
1392
+ },
1393
+ {
1394
+ "epoch": 1.07,
1395
+ "learning_rate": 0.0007514358974358975,
1396
+ "loss": 0.0914,
1397
+ "step": 5350
1398
+ },
1399
+ {
1400
+ "epoch": 1.07,
1401
+ "learning_rate": 0.0007501538461538461,
1402
+ "loss": 0.0947,
1403
+ "step": 5375
1404
+ },
1405
+ {
1406
+ "epoch": 1.07,
1407
+ "learning_rate": 0.0007488717948717949,
1408
+ "loss": 0.0861,
1409
+ "step": 5400
1410
+ },
1411
+ {
1412
+ "epoch": 1.07,
1413
+ "learning_rate": 0.0007475897435897436,
1414
+ "loss": 0.087,
1415
+ "step": 5425
1416
+ },
1417
+ {
1418
+ "epoch": 1.07,
1419
+ "learning_rate": 0.0007463076923076924,
1420
+ "loss": 0.1056,
1421
+ "step": 5450
1422
+ },
1423
+ {
1424
+ "epoch": 1.07,
1425
+ "learning_rate": 0.0007450256410256411,
1426
+ "loss": 0.0759,
1427
+ "step": 5475
1428
+ },
1429
+ {
1430
+ "epoch": 1.08,
1431
+ "learning_rate": 0.0007437435897435898,
1432
+ "loss": 0.0963,
1433
+ "step": 5500
1434
+ },
1435
+ {
1436
+ "epoch": 1.08,
1437
+ "learning_rate": 0.0007424615384615385,
1438
+ "loss": 0.079,
1439
+ "step": 5525
1440
+ },
1441
+ {
1442
+ "epoch": 1.08,
1443
+ "learning_rate": 0.0007411794871794872,
1444
+ "loss": 0.0808,
1445
+ "step": 5550
1446
+ },
1447
+ {
1448
+ "epoch": 1.08,
1449
+ "learning_rate": 0.000739897435897436,
1450
+ "loss": 0.0941,
1451
+ "step": 5575
1452
+ },
1453
+ {
1454
+ "epoch": 1.08,
1455
+ "learning_rate": 0.0007386153846153845,
1456
+ "loss": 0.0985,
1457
+ "step": 5600
1458
+ },
1459
+ {
1460
+ "epoch": 1.08,
1461
+ "eval_loss": 0.10466726869344711,
1462
+ "eval_runtime": 220.9516,
1463
+ "eval_samples_per_second": 4.526,
1464
+ "eval_steps_per_second": 0.285,
1465
+ "step": 5600
1466
+ },
1467
+ {
1468
+ "epoch": 1.08,
1469
+ "learning_rate": 0.0007373333333333333,
1470
+ "loss": 0.096,
1471
+ "step": 5625
1472
+ },
1473
+ {
1474
+ "epoch": 1.08,
1475
+ "learning_rate": 0.000736051282051282,
1476
+ "loss": 0.0836,
1477
+ "step": 5650
1478
+ },
1479
+ {
1480
+ "epoch": 1.08,
1481
+ "learning_rate": 0.0007347692307692308,
1482
+ "loss": 0.0871,
1483
+ "step": 5675
1484
+ },
1485
+ {
1486
+ "epoch": 1.09,
1487
+ "learning_rate": 0.0007334871794871795,
1488
+ "loss": 0.0806,
1489
+ "step": 5700
1490
+ },
1491
+ {
1492
+ "epoch": 1.09,
1493
+ "learning_rate": 0.0007322051282051281,
1494
+ "loss": 0.0743,
1495
+ "step": 5725
1496
+ },
1497
+ {
1498
+ "epoch": 1.09,
1499
+ "learning_rate": 0.0007309230769230769,
1500
+ "loss": 0.0764,
1501
+ "step": 5750
1502
+ },
1503
+ {
1504
+ "epoch": 1.09,
1505
+ "learning_rate": 0.0007296410256410256,
1506
+ "loss": 0.0785,
1507
+ "step": 5775
1508
+ },
1509
+ {
1510
+ "epoch": 1.09,
1511
+ "learning_rate": 0.0007283589743589744,
1512
+ "loss": 0.1074,
1513
+ "step": 5800
1514
+ },
1515
+ {
1516
+ "epoch": 1.09,
1517
+ "learning_rate": 0.0007270769230769231,
1518
+ "loss": 0.085,
1519
+ "step": 5825
1520
+ },
1521
+ {
1522
+ "epoch": 1.09,
1523
+ "learning_rate": 0.0007257948717948718,
1524
+ "loss": 0.0737,
1525
+ "step": 5850
1526
+ },
1527
+ {
1528
+ "epoch": 1.09,
1529
+ "learning_rate": 0.0007245128205128205,
1530
+ "loss": 0.0804,
1531
+ "step": 5875
1532
+ },
1533
+ {
1534
+ "epoch": 1.1,
1535
+ "learning_rate": 0.0007232307692307692,
1536
+ "loss": 0.0889,
1537
+ "step": 5900
1538
+ },
1539
+ {
1540
+ "epoch": 1.1,
1541
+ "learning_rate": 0.000721948717948718,
1542
+ "loss": 0.0962,
1543
+ "step": 5925
1544
+ },
1545
+ {
1546
+ "epoch": 1.1,
1547
+ "learning_rate": 0.0007206666666666667,
1548
+ "loss": 0.0869,
1549
+ "step": 5950
1550
+ },
1551
+ {
1552
+ "epoch": 1.1,
1553
+ "learning_rate": 0.0007193846153846154,
1554
+ "loss": 0.0913,
1555
+ "step": 5975
1556
+ },
1557
+ {
1558
+ "epoch": 1.1,
1559
+ "learning_rate": 0.0007181025641025641,
1560
+ "loss": 0.086,
1561
+ "step": 6000
1562
+ },
1563
+ {
1564
+ "epoch": 1.1,
1565
+ "eval_loss": 0.09825620800256729,
1566
+ "eval_runtime": 217.8898,
1567
+ "eval_samples_per_second": 4.589,
1568
+ "eval_steps_per_second": 0.289,
1569
+ "step": 6000
1570
+ },
1571
+ {
1572
+ "epoch": 1.1,
1573
+ "learning_rate": 0.0007168205128205129,
1574
+ "loss": 0.0859,
1575
+ "step": 6025
1576
+ },
1577
+ {
1578
+ "epoch": 1.1,
1579
+ "learning_rate": 0.0007155384615384616,
1580
+ "loss": 0.0757,
1581
+ "step": 6050
1582
+ },
1583
+ {
1584
+ "epoch": 1.1,
1585
+ "learning_rate": 0.0007142564102564103,
1586
+ "loss": 0.0643,
1587
+ "step": 6075
1588
+ },
1589
+ {
1590
+ "epoch": 1.11,
1591
+ "learning_rate": 0.000712974358974359,
1592
+ "loss": 0.0771,
1593
+ "step": 6100
1594
+ },
1595
+ {
1596
+ "epoch": 1.11,
1597
+ "learning_rate": 0.0007116923076923077,
1598
+ "loss": 0.0666,
1599
+ "step": 6125
1600
+ },
1601
+ {
1602
+ "epoch": 1.11,
1603
+ "learning_rate": 0.0007104102564102565,
1604
+ "loss": 0.0963,
1605
+ "step": 6150
1606
+ },
1607
+ {
1608
+ "epoch": 1.11,
1609
+ "learning_rate": 0.0007091282051282052,
1610
+ "loss": 0.1062,
1611
+ "step": 6175
1612
+ },
1613
+ {
1614
+ "epoch": 1.11,
1615
+ "learning_rate": 0.0007078461538461538,
1616
+ "loss": 0.0881,
1617
+ "step": 6200
1618
+ },
1619
+ {
1620
+ "epoch": 1.11,
1621
+ "learning_rate": 0.0007065641025641025,
1622
+ "loss": 0.0587,
1623
+ "step": 6225
1624
+ },
1625
+ {
1626
+ "epoch": 1.11,
1627
+ "learning_rate": 0.0007052820512820512,
1628
+ "loss": 0.0673,
1629
+ "step": 6250
1630
+ },
1631
+ {
1632
+ "epoch": 1.11,
1633
+ "learning_rate": 0.000704,
1634
+ "loss": 0.069,
1635
+ "step": 6275
1636
+ },
1637
+ {
1638
+ "epoch": 1.12,
1639
+ "learning_rate": 0.0007027179487179487,
1640
+ "loss": 0.0673,
1641
+ "step": 6300
1642
+ },
1643
+ {
1644
+ "epoch": 1.12,
1645
+ "learning_rate": 0.0007014358974358974,
1646
+ "loss": 0.0513,
1647
+ "step": 6325
1648
+ },
1649
+ {
1650
+ "epoch": 1.12,
1651
+ "learning_rate": 0.0007001538461538461,
1652
+ "loss": 0.0614,
1653
+ "step": 6350
1654
+ },
1655
+ {
1656
+ "epoch": 1.12,
1657
+ "learning_rate": 0.0006988717948717949,
1658
+ "loss": 0.0498,
1659
+ "step": 6375
1660
+ },
1661
+ {
1662
+ "epoch": 1.12,
1663
+ "learning_rate": 0.0006975897435897436,
1664
+ "loss": 0.0635,
1665
+ "step": 6400
1666
+ },
1667
+ {
1668
+ "epoch": 1.12,
1669
+ "eval_loss": 0.09686123579740524,
1670
+ "eval_runtime": 217.5581,
1671
+ "eval_samples_per_second": 4.596,
1672
+ "eval_steps_per_second": 0.29,
1673
+ "step": 6400
1674
+ },
1675
+ {
1676
+ "epoch": 1.12,
1677
+ "learning_rate": 0.0006963076923076924,
1678
+ "loss": 0.0656,
1679
+ "step": 6425
1680
+ },
1681
+ {
1682
+ "epoch": 1.12,
1683
+ "learning_rate": 0.000695025641025641,
1684
+ "loss": 0.0574,
1685
+ "step": 6450
1686
+ },
1687
+ {
1688
+ "epoch": 1.12,
1689
+ "learning_rate": 0.0006937435897435897,
1690
+ "loss": 0.0583,
1691
+ "step": 6475
1692
+ },
1693
+ {
1694
+ "epoch": 1.13,
1695
+ "learning_rate": 0.0006924615384615385,
1696
+ "loss": 0.0594,
1697
+ "step": 6500
1698
+ },
1699
+ {
1700
+ "epoch": 1.13,
1701
+ "learning_rate": 0.0006911794871794872,
1702
+ "loss": 0.0607,
1703
+ "step": 6525
1704
+ },
1705
+ {
1706
+ "epoch": 1.13,
1707
+ "learning_rate": 0.000689897435897436,
1708
+ "loss": 0.0544,
1709
+ "step": 6550
1710
+ },
1711
+ {
1712
+ "epoch": 1.13,
1713
+ "learning_rate": 0.0006886153846153846,
1714
+ "loss": 0.0466,
1715
+ "step": 6575
1716
+ },
1717
+ {
1718
+ "epoch": 1.13,
1719
+ "learning_rate": 0.0006873333333333334,
1720
+ "loss": 0.0574,
1721
+ "step": 6600
1722
+ },
1723
+ {
1724
+ "epoch": 1.13,
1725
+ "learning_rate": 0.0006860512820512821,
1726
+ "loss": 0.0655,
1727
+ "step": 6625
1728
+ },
1729
+ {
1730
+ "epoch": 1.13,
1731
+ "learning_rate": 0.0006847692307692308,
1732
+ "loss": 0.051,
1733
+ "step": 6650
1734
+ },
1735
+ {
1736
+ "epoch": 1.13,
1737
+ "learning_rate": 0.0006834871794871796,
1738
+ "loss": 0.0582,
1739
+ "step": 6675
1740
+ },
1741
+ {
1742
+ "epoch": 1.14,
1743
+ "learning_rate": 0.0006822051282051282,
1744
+ "loss": 0.0593,
1745
+ "step": 6700
1746
+ },
1747
+ {
1748
+ "epoch": 1.14,
1749
+ "learning_rate": 0.000680923076923077,
1750
+ "loss": 0.0597,
1751
+ "step": 6725
1752
+ },
1753
+ {
1754
+ "epoch": 1.14,
1755
+ "learning_rate": 0.0006796410256410257,
1756
+ "loss": 0.0603,
1757
+ "step": 6750
1758
+ },
1759
+ {
1760
+ "epoch": 1.14,
1761
+ "learning_rate": 0.0006783589743589745,
1762
+ "loss": 0.0655,
1763
+ "step": 6775
1764
+ },
1765
+ {
1766
+ "epoch": 1.14,
1767
+ "learning_rate": 0.0006770769230769231,
1768
+ "loss": 0.0664,
1769
+ "step": 6800
1770
+ },
1771
+ {
1772
+ "epoch": 1.14,
1773
+ "eval_loss": 0.0915135070681572,
1774
+ "eval_runtime": 217.3938,
1775
+ "eval_samples_per_second": 4.6,
1776
+ "eval_steps_per_second": 0.29,
1777
+ "step": 6800
1778
+ },
1779
+ {
1780
+ "epoch": 1.14,
1781
+ "learning_rate": 0.0006757948717948717,
1782
+ "loss": 0.0604,
1783
+ "step": 6825
1784
+ },
1785
+ {
1786
+ "epoch": 1.14,
1787
+ "learning_rate": 0.0006745128205128205,
1788
+ "loss": 0.0765,
1789
+ "step": 6850
1790
+ },
1791
+ {
1792
+ "epoch": 1.14,
1793
+ "learning_rate": 0.0006732307692307692,
1794
+ "loss": 0.0622,
1795
+ "step": 6875
1796
+ },
1797
+ {
1798
+ "epoch": 1.15,
1799
+ "learning_rate": 0.000671948717948718,
1800
+ "loss": 0.0572,
1801
+ "step": 6900
1802
+ },
1803
+ {
1804
+ "epoch": 1.15,
1805
+ "learning_rate": 0.0006706666666666666,
1806
+ "loss": 0.0423,
1807
+ "step": 6925
1808
+ },
1809
+ {
1810
+ "epoch": 1.15,
1811
+ "learning_rate": 0.0006693846153846154,
1812
+ "loss": 0.0504,
1813
+ "step": 6950
1814
+ },
1815
+ {
1816
+ "epoch": 1.15,
1817
+ "learning_rate": 0.0006681025641025641,
1818
+ "loss": 0.0747,
1819
+ "step": 6975
1820
+ },
1821
+ {
1822
+ "epoch": 1.15,
1823
+ "learning_rate": 0.0006668205128205128,
1824
+ "loss": 0.0651,
1825
+ "step": 7000
1826
+ },
1827
+ {
1828
+ "epoch": 1.15,
1829
+ "learning_rate": 0.0006655384615384616,
1830
+ "loss": 0.0549,
1831
+ "step": 7025
1832
+ },
1833
+ {
1834
+ "epoch": 1.15,
1835
+ "learning_rate": 0.0006642564102564102,
1836
+ "loss": 0.0512,
1837
+ "step": 7050
1838
+ },
1839
+ {
1840
+ "epoch": 1.15,
1841
+ "learning_rate": 0.000662974358974359,
1842
+ "loss": 0.067,
1843
+ "step": 7075
1844
+ },
1845
+ {
1846
+ "epoch": 1.16,
1847
+ "learning_rate": 0.0006616923076923077,
1848
+ "loss": 0.0638,
1849
+ "step": 7100
1850
+ },
1851
+ {
1852
+ "epoch": 1.16,
1853
+ "learning_rate": 0.0006604102564102565,
1854
+ "loss": 0.0512,
1855
+ "step": 7125
1856
+ },
1857
+ {
1858
+ "epoch": 1.16,
1859
+ "learning_rate": 0.0006591282051282051,
1860
+ "loss": 0.0557,
1861
+ "step": 7150
1862
+ },
1863
+ {
1864
+ "epoch": 1.16,
1865
+ "learning_rate": 0.0006578461538461538,
1866
+ "loss": 0.0497,
1867
+ "step": 7175
1868
+ },
1869
+ {
1870
+ "epoch": 1.16,
1871
+ "learning_rate": 0.0006565641025641026,
1872
+ "loss": 0.0605,
1873
+ "step": 7200
1874
+ },
1875
+ {
1876
+ "epoch": 1.16,
1877
+ "eval_loss": 0.09085912257432938,
1878
+ "eval_runtime": 217.4684,
1879
+ "eval_samples_per_second": 4.598,
1880
+ "eval_steps_per_second": 0.29,
1881
+ "step": 7200
1882
+ },
1883
+ {
1884
+ "epoch": 1.16,
1885
+ "learning_rate": 0.0006552820512820513,
1886
+ "loss": 0.0573,
1887
+ "step": 7225
1888
+ },
1889
+ {
1890
+ "epoch": 1.16,
1891
+ "learning_rate": 0.0006540000000000001,
1892
+ "loss": 0.0714,
1893
+ "step": 7250
1894
+ },
1895
+ {
1896
+ "epoch": 1.16,
1897
+ "learning_rate": 0.0006527179487179487,
1898
+ "loss": 0.0813,
1899
+ "step": 7275
1900
+ },
1901
+ {
1902
+ "epoch": 1.17,
1903
+ "learning_rate": 0.0006514358974358975,
1904
+ "loss": 0.0664,
1905
+ "step": 7300
1906
+ },
1907
+ {
1908
+ "epoch": 1.17,
1909
+ "learning_rate": 0.0006501538461538462,
1910
+ "loss": 0.0789,
1911
+ "step": 7325
1912
+ },
1913
+ {
1914
+ "epoch": 1.17,
1915
+ "learning_rate": 0.0006488717948717949,
1916
+ "loss": 0.0711,
1917
+ "step": 7350
1918
+ },
1919
+ {
1920
+ "epoch": 1.17,
1921
+ "learning_rate": 0.0006475897435897437,
1922
+ "loss": 0.0635,
1923
+ "step": 7375
1924
+ },
1925
+ {
1926
+ "epoch": 1.17,
1927
+ "learning_rate": 0.0006463076923076923,
1928
+ "loss": 0.0794,
1929
+ "step": 7400
1930
+ },
1931
+ {
1932
+ "epoch": 1.17,
1933
+ "learning_rate": 0.000645025641025641,
1934
+ "loss": 0.0742,
1935
+ "step": 7425
1936
+ },
1937
+ {
1938
+ "epoch": 1.17,
1939
+ "learning_rate": 0.0006437435897435897,
1940
+ "loss": 0.0763,
1941
+ "step": 7450
1942
+ },
1943
+ {
1944
+ "epoch": 1.17,
1945
+ "learning_rate": 0.0006424615384615385,
1946
+ "loss": 0.0783,
1947
+ "step": 7475
1948
+ },
1949
+ {
1950
+ "epoch": 1.18,
1951
+ "learning_rate": 0.0006411794871794871,
1952
+ "loss": 0.1092,
1953
+ "step": 7500
1954
+ },
1955
+ {
1956
+ "epoch": 1.18,
1957
+ "learning_rate": 0.0006398974358974358,
1958
+ "loss": 0.0765,
1959
+ "step": 7525
1960
+ },
1961
+ {
1962
+ "epoch": 1.18,
1963
+ "learning_rate": 0.0006386153846153846,
1964
+ "loss": 0.0926,
1965
+ "step": 7550
1966
+ },
1967
+ {
1968
+ "epoch": 1.18,
1969
+ "learning_rate": 0.0006373333333333333,
1970
+ "loss": 0.0669,
1971
+ "step": 7575
1972
+ },
1973
+ {
1974
+ "epoch": 1.18,
1975
+ "learning_rate": 0.0006360512820512821,
1976
+ "loss": 0.0645,
1977
+ "step": 7600
1978
+ },
1979
+ {
1980
+ "epoch": 1.18,
1981
+ "eval_loss": 0.10231851041316986,
1982
+ "eval_runtime": 216.1204,
1983
+ "eval_samples_per_second": 4.627,
1984
+ "eval_steps_per_second": 0.292,
1985
+ "step": 7600
1986
+ },
1987
+ {
1988
+ "epoch": 1.18,
1989
+ "learning_rate": 0.0006347692307692307,
1990
+ "loss": 0.0898,
1991
+ "step": 7625
1992
+ },
1993
+ {
1994
+ "epoch": 1.18,
1995
+ "learning_rate": 0.0006334871794871795,
1996
+ "loss": 0.103,
1997
+ "step": 7650
1998
+ },
1999
+ {
2000
+ "epoch": 1.18,
2001
+ "learning_rate": 0.0006322051282051282,
2002
+ "loss": 0.1355,
2003
+ "step": 7675
2004
+ },
2005
+ {
2006
+ "epoch": 1.19,
2007
+ "learning_rate": 0.0006309230769230769,
2008
+ "loss": 0.0953,
2009
+ "step": 7700
2010
+ },
2011
+ {
2012
+ "epoch": 1.19,
2013
+ "learning_rate": 0.0006296410256410257,
2014
+ "loss": 0.0807,
2015
+ "step": 7725
2016
+ },
2017
+ {
2018
+ "epoch": 1.19,
2019
+ "learning_rate": 0.0006283589743589743,
2020
+ "loss": 0.069,
2021
+ "step": 7750
2022
+ },
2023
+ {
2024
+ "epoch": 1.19,
2025
+ "learning_rate": 0.0006271282051282051,
2026
+ "loss": 0.0823,
2027
+ "step": 7775
2028
+ },
2029
+ {
2030
+ "epoch": 1.19,
2031
+ "learning_rate": 0.0006258461538461538,
2032
+ "loss": 0.0687,
2033
+ "step": 7800
2034
+ },
2035
+ {
2036
+ "epoch": 1.19,
2037
+ "learning_rate": 0.0006245641025641026,
2038
+ "loss": 0.0701,
2039
+ "step": 7825
2040
+ },
2041
+ {
2042
+ "epoch": 1.19,
2043
+ "learning_rate": 0.0006232820512820513,
2044
+ "loss": 0.089,
2045
+ "step": 7850
2046
+ },
2047
+ {
2048
+ "epoch": 1.19,
2049
+ "learning_rate": 0.000622,
2050
+ "loss": 0.0734,
2051
+ "step": 7875
2052
+ },
2053
+ {
2054
+ "epoch": 1.2,
2055
+ "learning_rate": 0.0006207179487179487,
2056
+ "loss": 0.0798,
2057
+ "step": 7900
2058
+ },
2059
+ {
2060
+ "epoch": 1.2,
2061
+ "learning_rate": 0.0006194358974358975,
2062
+ "loss": 0.0835,
2063
+ "step": 7925
2064
+ },
2065
+ {
2066
+ "epoch": 1.2,
2067
+ "learning_rate": 0.0006181538461538462,
2068
+ "loss": 0.1016,
2069
+ "step": 7950
2070
+ },
2071
+ {
2072
+ "epoch": 2.0,
2073
+ "learning_rate": 0.0006168717948717949,
2074
+ "loss": 0.1292,
2075
+ "step": 7975
2076
+ },
2077
+ {
2078
+ "epoch": 2.0,
2079
+ "learning_rate": 0.0006155897435897436,
2080
+ "loss": 0.1077,
2081
+ "step": 8000
2082
+ },
2083
+ {
2084
+ "epoch": 2.0,
2085
+ "eval_loss": 0.8477774858474731,
2086
+ "eval_runtime": 216.383,
2087
+ "eval_samples_per_second": 4.621,
2088
+ "eval_steps_per_second": 0.291,
2089
+ "step": 8000
2090
+ },
2091
+ {
2092
+ "epoch": 2.0,
2093
+ "learning_rate": 0.0006143076923076923,
2094
+ "loss": 0.109,
2095
+ "step": 8025
2096
+ },
2097
+ {
2098
+ "epoch": 2.0,
2099
+ "learning_rate": 0.0006130256410256411,
2100
+ "loss": 0.1169,
2101
+ "step": 8050
2102
+ },
2103
+ {
2104
+ "epoch": 2.01,
2105
+ "learning_rate": 0.0006117435897435898,
2106
+ "loss": 0.1108,
2107
+ "step": 8075
2108
+ },
2109
+ {
2110
+ "epoch": 2.01,
2111
+ "learning_rate": 0.0006104615384615386,
2112
+ "loss": 0.1014,
2113
+ "step": 8100
2114
+ },
2115
+ {
2116
+ "epoch": 2.01,
2117
+ "learning_rate": 0.0006091794871794872,
2118
+ "loss": 0.0945,
2119
+ "step": 8125
2120
+ },
2121
+ {
2122
+ "epoch": 2.01,
2123
+ "learning_rate": 0.0006078974358974359,
2124
+ "loss": 0.1005,
2125
+ "step": 8150
2126
+ },
2127
+ {
2128
+ "epoch": 2.01,
2129
+ "learning_rate": 0.0006066153846153847,
2130
+ "loss": 0.0888,
2131
+ "step": 8175
2132
+ },
2133
+ {
2134
+ "epoch": 2.01,
2135
+ "learning_rate": 0.0006053333333333333,
2136
+ "loss": 0.0959,
2137
+ "step": 8200
2138
+ },
2139
+ {
2140
+ "epoch": 2.01,
2141
+ "learning_rate": 0.000604051282051282,
2142
+ "loss": 0.1038,
2143
+ "step": 8225
2144
+ },
2145
+ {
2146
+ "epoch": 2.01,
2147
+ "learning_rate": 0.0006027692307692307,
2148
+ "loss": 0.0909,
2149
+ "step": 8250
2150
+ },
2151
+ {
2152
+ "epoch": 2.02,
2153
+ "learning_rate": 0.0006014871794871795,
2154
+ "loss": 0.0994,
2155
+ "step": 8275
2156
+ },
2157
+ {
2158
+ "epoch": 2.02,
2159
+ "learning_rate": 0.0006002051282051282,
2160
+ "loss": 0.0996,
2161
+ "step": 8300
2162
+ },
2163
+ {
2164
+ "epoch": 2.02,
2165
+ "learning_rate": 0.0005989230769230769,
2166
+ "loss": 0.0922,
2167
+ "step": 8325
2168
+ },
2169
+ {
2170
+ "epoch": 2.02,
2171
+ "learning_rate": 0.0005976410256410256,
2172
+ "loss": 0.099,
2173
+ "step": 8350
2174
+ },
2175
+ {
2176
+ "epoch": 2.02,
2177
+ "learning_rate": 0.0005963589743589743,
2178
+ "loss": 0.0924,
2179
+ "step": 8375
2180
+ },
2181
+ {
2182
+ "epoch": 2.02,
2183
+ "learning_rate": 0.0005950769230769231,
2184
+ "loss": 0.0891,
2185
+ "step": 8400
2186
+ },
2187
+ {
2188
+ "epoch": 2.02,
2189
+ "eval_loss": 0.8391351699829102,
2190
+ "eval_runtime": 216.0468,
2191
+ "eval_samples_per_second": 4.629,
2192
+ "eval_steps_per_second": 0.292,
2193
+ "step": 8400
2194
+ },
2195
+ {
2196
+ "epoch": 2.02,
2197
+ "learning_rate": 0.0005937948717948718,
2198
+ "loss": 0.0856,
2199
+ "step": 8425
2200
+ },
2201
+ {
2202
+ "epoch": 2.02,
2203
+ "learning_rate": 0.0005925128205128206,
2204
+ "loss": 0.082,
2205
+ "step": 8450
2206
+ },
2207
+ {
2208
+ "epoch": 2.03,
2209
+ "learning_rate": 0.0005912307692307692,
2210
+ "loss": 0.0887,
2211
+ "step": 8475
2212
+ },
2213
+ {
2214
+ "epoch": 2.03,
2215
+ "learning_rate": 0.0005899487179487179,
2216
+ "loss": 0.0836,
2217
+ "step": 8500
2218
+ },
2219
+ {
2220
+ "epoch": 2.03,
2221
+ "learning_rate": 0.0005886666666666667,
2222
+ "loss": 0.0841,
2223
+ "step": 8525
2224
+ },
2225
+ {
2226
+ "epoch": 2.03,
2227
+ "learning_rate": 0.0005873846153846154,
2228
+ "loss": 0.0814,
2229
+ "step": 8550
2230
+ },
2231
+ {
2232
+ "epoch": 2.03,
2233
+ "learning_rate": 0.0005861025641025642,
2234
+ "loss": 0.0903,
2235
+ "step": 8575
2236
+ },
2237
+ {
2238
+ "epoch": 2.03,
2239
+ "learning_rate": 0.0005848205128205128,
2240
+ "loss": 0.0846,
2241
+ "step": 8600
2242
+ },
2243
+ {
2244
+ "epoch": 2.03,
2245
+ "learning_rate": 0.0005835384615384616,
2246
+ "loss": 0.0759,
2247
+ "step": 8625
2248
+ },
2249
+ {
2250
+ "epoch": 2.03,
2251
+ "learning_rate": 0.0005822564102564103,
2252
+ "loss": 0.0817,
2253
+ "step": 8650
2254
+ },
2255
+ {
2256
+ "epoch": 2.04,
2257
+ "learning_rate": 0.000580974358974359,
2258
+ "loss": 0.0768,
2259
+ "step": 8675
2260
+ },
2261
+ {
2262
+ "epoch": 2.04,
2263
+ "learning_rate": 0.0005796923076923078,
2264
+ "loss": 0.0718,
2265
+ "step": 8700
2266
+ },
2267
+ {
2268
+ "epoch": 2.04,
2269
+ "learning_rate": 0.0005784102564102564,
2270
+ "loss": 0.0737,
2271
+ "step": 8725
2272
+ },
2273
+ {
2274
+ "epoch": 2.04,
2275
+ "learning_rate": 0.0005771282051282052,
2276
+ "loss": 0.0712,
2277
+ "step": 8750
2278
+ },
2279
+ {
2280
+ "epoch": 2.04,
2281
+ "learning_rate": 0.0005758461538461539,
2282
+ "loss": 0.0768,
2283
+ "step": 8775
2284
+ },
2285
+ {
2286
+ "epoch": 2.04,
2287
+ "learning_rate": 0.0005745641025641027,
2288
+ "loss": 0.0803,
2289
+ "step": 8800
2290
+ },
2291
+ {
2292
+ "epoch": 2.04,
2293
+ "eval_loss": 0.759874165058136,
2294
+ "eval_runtime": 215.1962,
2295
+ "eval_samples_per_second": 4.647,
2296
+ "eval_steps_per_second": 0.293,
2297
+ "step": 8800
2298
+ },
2299
+ {
2300
+ "epoch": 2.04,
2301
+ "learning_rate": 0.0005732820512820512,
2302
+ "loss": 0.068,
2303
+ "step": 8825
2304
+ },
2305
+ {
2306
+ "epoch": 2.04,
2307
+ "learning_rate": 0.0005719999999999999,
2308
+ "loss": 0.0679,
2309
+ "step": 8850
2310
+ },
2311
+ {
2312
+ "epoch": 2.05,
2313
+ "learning_rate": 0.0005707179487179487,
2314
+ "loss": 0.0687,
2315
+ "step": 8875
2316
+ },
2317
+ {
2318
+ "epoch": 2.05,
2319
+ "learning_rate": 0.0005694358974358974,
2320
+ "loss": 0.0675,
2321
+ "step": 8900
2322
+ },
2323
+ {
2324
+ "epoch": 2.05,
2325
+ "learning_rate": 0.0005681538461538462,
2326
+ "loss": 0.0736,
2327
+ "step": 8925
2328
+ },
2329
+ {
2330
+ "epoch": 2.05,
2331
+ "learning_rate": 0.0005668717948717948,
2332
+ "loss": 0.0737,
2333
+ "step": 8950
2334
+ },
2335
+ {
2336
+ "epoch": 2.05,
2337
+ "learning_rate": 0.0005655897435897436,
2338
+ "loss": 0.0737,
2339
+ "step": 8975
2340
+ },
2341
+ {
2342
+ "epoch": 2.05,
2343
+ "learning_rate": 0.0005643076923076923,
2344
+ "loss": 0.0674,
2345
+ "step": 9000
2346
+ },
2347
+ {
2348
+ "epoch": 2.05,
2349
+ "learning_rate": 0.000563025641025641,
2350
+ "loss": 0.0703,
2351
+ "step": 9025
2352
+ },
2353
+ {
2354
+ "epoch": 2.05,
2355
+ "learning_rate": 0.0005617435897435898,
2356
+ "loss": 0.0698,
2357
+ "step": 9050
2358
+ },
2359
+ {
2360
+ "epoch": 2.06,
2361
+ "learning_rate": 0.0005604615384615384,
2362
+ "loss": 0.0723,
2363
+ "step": 9075
2364
+ },
2365
+ {
2366
+ "epoch": 2.06,
2367
+ "learning_rate": 0.0005591794871794872,
2368
+ "loss": 0.0688,
2369
+ "step": 9100
2370
+ },
2371
+ {
2372
+ "epoch": 2.06,
2373
+ "learning_rate": 0.0005578974358974359,
2374
+ "loss": 0.0664,
2375
+ "step": 9125
2376
+ },
2377
+ {
2378
+ "epoch": 2.06,
2379
+ "learning_rate": 0.0005566153846153847,
2380
+ "loss": 0.0677,
2381
+ "step": 9150
2382
+ },
2383
+ {
2384
+ "epoch": 2.06,
2385
+ "learning_rate": 0.0005553333333333334,
2386
+ "loss": 0.0624,
2387
+ "step": 9175
2388
+ },
2389
+ {
2390
+ "epoch": 2.06,
2391
+ "learning_rate": 0.000554051282051282,
2392
+ "loss": 0.068,
2393
+ "step": 9200
2394
+ },
2395
+ {
2396
+ "epoch": 2.06,
2397
+ "eval_loss": 0.77803635597229,
2398
+ "eval_runtime": 216.146,
2399
+ "eval_samples_per_second": 4.627,
2400
+ "eval_steps_per_second": 0.291,
2401
+ "step": 9200
2402
+ },
2403
+ {
2404
+ "epoch": 2.06,
2405
+ "learning_rate": 0.0005527692307692308,
2406
+ "loss": 0.0717,
2407
+ "step": 9225
2408
+ },
2409
+ {
2410
+ "epoch": 2.06,
2411
+ "learning_rate": 0.0005514871794871795,
2412
+ "loss": 0.0692,
2413
+ "step": 9250
2414
+ },
2415
+ {
2416
+ "epoch": 2.07,
2417
+ "learning_rate": 0.0005502051282051283,
2418
+ "loss": 0.0657,
2419
+ "step": 9275
2420
+ },
2421
+ {
2422
+ "epoch": 2.07,
2423
+ "learning_rate": 0.000548923076923077,
2424
+ "loss": 0.064,
2425
+ "step": 9300
2426
+ },
2427
+ {
2428
+ "epoch": 2.07,
2429
+ "learning_rate": 0.0005476410256410257,
2430
+ "loss": 0.0661,
2431
+ "step": 9325
2432
+ },
2433
+ {
2434
+ "epoch": 2.07,
2435
+ "learning_rate": 0.0005463589743589744,
2436
+ "loss": 0.0604,
2437
+ "step": 9350
2438
+ },
2439
+ {
2440
+ "epoch": 2.07,
2441
+ "learning_rate": 0.0005450769230769232,
2442
+ "loss": 0.066,
2443
+ "step": 9375
2444
+ },
2445
+ {
2446
+ "epoch": 2.07,
2447
+ "learning_rate": 0.0005437948717948719,
2448
+ "loss": 0.0617,
2449
+ "step": 9400
2450
+ },
2451
+ {
2452
+ "epoch": 2.07,
2453
+ "learning_rate": 0.0005425128205128204,
2454
+ "loss": 0.0622,
2455
+ "step": 9425
2456
+ },
2457
+ {
2458
+ "epoch": 2.07,
2459
+ "learning_rate": 0.0005412307692307692,
2460
+ "loss": 0.056,
2461
+ "step": 9450
2462
+ },
2463
+ {
2464
+ "epoch": 2.08,
2465
+ "learning_rate": 0.0005399487179487179,
2466
+ "loss": 0.0621,
2467
+ "step": 9475
2468
+ },
2469
+ {
2470
+ "epoch": 2.08,
2471
+ "learning_rate": 0.0005386666666666667,
2472
+ "loss": 0.0578,
2473
+ "step": 9500
2474
+ },
2475
+ {
2476
+ "epoch": 2.08,
2477
+ "learning_rate": 0.0005373846153846154,
2478
+ "loss": 0.0526,
2479
+ "step": 9525
2480
+ },
2481
+ {
2482
+ "epoch": 2.08,
2483
+ "learning_rate": 0.0005361025641025641,
2484
+ "loss": 0.0703,
2485
+ "step": 9550
2486
+ },
2487
+ {
2488
+ "epoch": 2.08,
2489
+ "learning_rate": 0.0005348205128205128,
2490
+ "loss": 0.0629,
2491
+ "step": 9575
2492
+ },
2493
+ {
2494
+ "epoch": 2.08,
2495
+ "learning_rate": 0.0005335384615384615,
2496
+ "loss": 0.0723,
2497
+ "step": 9600
2498
+ },
2499
+ {
2500
+ "epoch": 2.08,
2501
+ "eval_loss": 0.09210965782403946,
2502
+ "eval_runtime": 216.3525,
2503
+ "eval_samples_per_second": 4.622,
2504
+ "eval_steps_per_second": 0.291,
2505
+ "step": 9600
2506
+ },
2507
+ {
2508
+ "epoch": 2.08,
2509
+ "learning_rate": 0.0005322564102564103,
2510
+ "loss": 0.0606,
2511
+ "step": 9625
2512
+ },
2513
+ {
2514
+ "epoch": 2.08,
2515
+ "learning_rate": 0.000530974358974359,
2516
+ "loss": 0.0577,
2517
+ "step": 9650
2518
+ },
2519
+ {
2520
+ "epoch": 2.09,
2521
+ "learning_rate": 0.0005296923076923077,
2522
+ "loss": 0.0595,
2523
+ "step": 9675
2524
+ },
2525
+ {
2526
+ "epoch": 2.09,
2527
+ "learning_rate": 0.0005284102564102564,
2528
+ "loss": 0.0561,
2529
+ "step": 9700
2530
+ },
2531
+ {
2532
+ "epoch": 2.09,
2533
+ "learning_rate": 0.0005271282051282052,
2534
+ "loss": 0.0558,
2535
+ "step": 9725
2536
+ },
2537
+ {
2538
+ "epoch": 2.09,
2539
+ "learning_rate": 0.0005258461538461539,
2540
+ "loss": 0.0562,
2541
+ "step": 9750
2542
+ },
2543
+ {
2544
+ "epoch": 2.09,
2545
+ "learning_rate": 0.0005245641025641025,
2546
+ "loss": 0.0756,
2547
+ "step": 9775
2548
+ },
2549
+ {
2550
+ "epoch": 2.09,
2551
+ "learning_rate": 0.0005232820512820513,
2552
+ "loss": 0.0624,
2553
+ "step": 9800
2554
+ },
2555
+ {
2556
+ "epoch": 2.09,
2557
+ "learning_rate": 0.000522,
2558
+ "loss": 0.0532,
2559
+ "step": 9825
2560
+ },
2561
+ {
2562
+ "epoch": 2.09,
2563
+ "learning_rate": 0.0005207179487179488,
2564
+ "loss": 0.0546,
2565
+ "step": 9850
2566
+ },
2567
+ {
2568
+ "epoch": 2.1,
2569
+ "learning_rate": 0.0005194358974358975,
2570
+ "loss": 0.0603,
2571
+ "step": 9875
2572
+ },
2573
+ {
2574
+ "epoch": 2.1,
2575
+ "learning_rate": 0.0005181538461538462,
2576
+ "loss": 0.0633,
2577
+ "step": 9900
2578
+ },
2579
+ {
2580
+ "epoch": 2.1,
2581
+ "learning_rate": 0.0005168717948717949,
2582
+ "loss": 0.0598,
2583
+ "step": 9925
2584
+ },
2585
+ {
2586
+ "epoch": 2.1,
2587
+ "learning_rate": 0.0005155897435897436,
2588
+ "loss": 0.0635,
2589
+ "step": 9950
2590
+ },
2591
+ {
2592
+ "epoch": 2.1,
2593
+ "learning_rate": 0.0005143076923076924,
2594
+ "loss": 0.0587,
2595
+ "step": 9975
2596
+ },
2597
+ {
2598
+ "epoch": 2.1,
2599
+ "learning_rate": 0.0005130256410256411,
2600
+ "loss": 0.0615,
2601
+ "step": 10000
2602
+ },
2603
+ {
2604
+ "epoch": 2.1,
2605
+ "eval_loss": 0.09180190414190292,
2606
+ "eval_runtime": 215.773,
2607
+ "eval_samples_per_second": 4.635,
2608
+ "eval_steps_per_second": 0.292,
2609
+ "step": 10000
2610
+ },
2611
+ {
2612
+ "epoch": 2.1,
2613
+ "learning_rate": 0.0005117435897435897,
2614
+ "loss": 0.0519,
2615
+ "step": 10025
2616
+ },
2617
+ {
2618
+ "epoch": 2.1,
2619
+ "learning_rate": 0.0005104615384615384,
2620
+ "loss": 0.0445,
2621
+ "step": 10050
2622
+ },
2623
+ {
2624
+ "epoch": 2.11,
2625
+ "learning_rate": 0.0005091794871794872,
2626
+ "loss": 0.0528,
2627
+ "step": 10075
2628
+ },
2629
+ {
2630
+ "epoch": 2.11,
2631
+ "learning_rate": 0.0005078974358974359,
2632
+ "loss": 0.0458,
2633
+ "step": 10100
2634
+ },
2635
+ {
2636
+ "epoch": 2.11,
2637
+ "learning_rate": 0.0005066153846153845,
2638
+ "loss": 0.0629,
2639
+ "step": 10125
2640
+ },
2641
+ {
2642
+ "epoch": 2.11,
2643
+ "learning_rate": 0.0005053333333333333,
2644
+ "loss": 0.08,
2645
+ "step": 10150
2646
+ },
2647
+ {
2648
+ "epoch": 2.11,
2649
+ "learning_rate": 0.000504051282051282,
2650
+ "loss": 0.0623,
2651
+ "step": 10175
2652
+ },
2653
+ {
2654
+ "epoch": 2.11,
2655
+ "learning_rate": 0.0005027692307692308,
2656
+ "loss": 0.0479,
2657
+ "step": 10200
2658
+ },
2659
+ {
2660
+ "epoch": 2.11,
2661
+ "learning_rate": 0.0005014871794871795,
2662
+ "loss": 0.052,
2663
+ "step": 10225
2664
+ },
2665
+ {
2666
+ "epoch": 2.11,
2667
+ "learning_rate": 0.0005002051282051282,
2668
+ "loss": 0.0546,
2669
+ "step": 10250
2670
+ },
2671
+ {
2672
+ "epoch": 2.12,
2673
+ "learning_rate": 0.0004989230769230769,
2674
+ "loss": 0.0495,
2675
+ "step": 10275
2676
+ },
2677
+ {
2678
+ "epoch": 2.12,
2679
+ "learning_rate": 0.0004976410256410256,
2680
+ "loss": 0.0403,
2681
+ "step": 10300
2682
+ },
2683
+ {
2684
+ "epoch": 2.12,
2685
+ "learning_rate": 0.0004963589743589744,
2686
+ "loss": 0.0468,
2687
+ "step": 10325
2688
+ },
2689
+ {
2690
+ "epoch": 2.12,
2691
+ "learning_rate": 0.0004950769230769231,
2692
+ "loss": 0.0393,
2693
+ "step": 10350
2694
+ },
2695
+ {
2696
+ "epoch": 2.12,
2697
+ "learning_rate": 0.0004937948717948718,
2698
+ "loss": 0.0454,
2699
+ "step": 10375
2700
+ },
2701
+ {
2702
+ "epoch": 2.12,
2703
+ "learning_rate": 0.0004925128205128205,
2704
+ "loss": 0.0493,
2705
+ "step": 10400
2706
+ },
2707
+ {
2708
+ "epoch": 2.12,
2709
+ "eval_loss": 0.09357196092605591,
2710
+ "eval_runtime": 218.7372,
2711
+ "eval_samples_per_second": 4.572,
2712
+ "eval_steps_per_second": 0.288,
2713
+ "step": 10400
2714
+ },
2715
+ {
2716
+ "epoch": 2.12,
2717
+ "learning_rate": 0.0004912307692307693,
2718
+ "loss": 0.0402,
2719
+ "step": 10425
2720
+ },
2721
+ {
2722
+ "epoch": 2.12,
2723
+ "learning_rate": 0.000489948717948718,
2724
+ "loss": 0.0379,
2725
+ "step": 10450
2726
+ },
2727
+ {
2728
+ "epoch": 2.13,
2729
+ "learning_rate": 0.0004886666666666667,
2730
+ "loss": 0.0403,
2731
+ "step": 10475
2732
+ },
2733
+ {
2734
+ "epoch": 2.13,
2735
+ "learning_rate": 0.0004873846153846154,
2736
+ "loss": 0.0404,
2737
+ "step": 10500
2738
+ },
2739
+ {
2740
+ "epoch": 2.13,
2741
+ "learning_rate": 0.0004861025641025641,
2742
+ "loss": 0.0376,
2743
+ "step": 10525
2744
+ },
2745
+ {
2746
+ "epoch": 2.13,
2747
+ "learning_rate": 0.0004848205128205128,
2748
+ "loss": 0.0336,
2749
+ "step": 10550
2750
+ },
2751
+ {
2752
+ "epoch": 2.13,
2753
+ "learning_rate": 0.0004835384615384615,
2754
+ "loss": 0.039,
2755
+ "step": 10575
2756
+ },
2757
+ {
2758
+ "epoch": 2.13,
2759
+ "learning_rate": 0.00048225641025641025,
2760
+ "loss": 0.0459,
2761
+ "step": 10600
2762
+ },
2763
+ {
2764
+ "epoch": 2.13,
2765
+ "learning_rate": 0.000480974358974359,
2766
+ "loss": 0.036,
2767
+ "step": 10625
2768
+ },
2769
+ {
2770
+ "epoch": 2.13,
2771
+ "learning_rate": 0.0004796923076923077,
2772
+ "loss": 0.0392,
2773
+ "step": 10650
2774
+ },
2775
+ {
2776
+ "epoch": 2.14,
2777
+ "learning_rate": 0.00047841025641025644,
2778
+ "loss": 0.0443,
2779
+ "step": 10675
2780
+ },
2781
+ {
2782
+ "epoch": 2.14,
2783
+ "learning_rate": 0.00047712820512820517,
2784
+ "loss": 0.0413,
2785
+ "step": 10700
2786
+ },
2787
+ {
2788
+ "epoch": 2.14,
2789
+ "learning_rate": 0.0004758461538461539,
2790
+ "loss": 0.0412,
2791
+ "step": 10725
2792
+ },
2793
+ {
2794
+ "epoch": 2.14,
2795
+ "learning_rate": 0.0004745641025641026,
2796
+ "loss": 0.0454,
2797
+ "step": 10750
2798
+ },
2799
+ {
2800
+ "epoch": 2.14,
2801
+ "learning_rate": 0.0004732820512820513,
2802
+ "loss": 0.0439,
2803
+ "step": 10775
2804
+ },
2805
+ {
2806
+ "epoch": 2.14,
2807
+ "learning_rate": 0.000472,
2808
+ "loss": 0.0412,
2809
+ "step": 10800
2810
+ },
2811
+ {
2812
+ "epoch": 2.14,
2813
+ "eval_loss": 0.08619751781225204,
2814
+ "eval_runtime": 217.221,
2815
+ "eval_samples_per_second": 4.604,
2816
+ "eval_steps_per_second": 0.29,
2817
+ "step": 10800
2818
+ },
2819
+ {
2820
+ "epoch": 2.14,
2821
+ "learning_rate": 0.0004707179487179487,
2822
+ "loss": 0.0537,
2823
+ "step": 10825
2824
+ },
2825
+ {
2826
+ "epoch": 2.14,
2827
+ "learning_rate": 0.00046943589743589744,
2828
+ "loss": 0.0436,
2829
+ "step": 10850
2830
+ },
2831
+ {
2832
+ "epoch": 2.15,
2833
+ "learning_rate": 0.00046815384615384617,
2834
+ "loss": 0.0404,
2835
+ "step": 10875
2836
+ },
2837
+ {
2838
+ "epoch": 2.15,
2839
+ "learning_rate": 0.0004668717948717949,
2840
+ "loss": 0.0303,
2841
+ "step": 10900
2842
+ },
2843
+ {
2844
+ "epoch": 2.15,
2845
+ "learning_rate": 0.0004655897435897436,
2846
+ "loss": 0.0332,
2847
+ "step": 10925
2848
+ },
2849
+ {
2850
+ "epoch": 2.15,
2851
+ "learning_rate": 0.0004643076923076923,
2852
+ "loss": 0.0501,
2853
+ "step": 10950
2854
+ },
2855
+ {
2856
+ "epoch": 2.15,
2857
+ "learning_rate": 0.00046302564102564104,
2858
+ "loss": 0.0483,
2859
+ "step": 10975
2860
+ },
2861
+ {
2862
+ "epoch": 2.15,
2863
+ "learning_rate": 0.00046174358974358977,
2864
+ "loss": 0.038,
2865
+ "step": 11000
2866
+ },
2867
+ {
2868
+ "epoch": 2.15,
2869
+ "learning_rate": 0.0004604615384615385,
2870
+ "loss": 0.034,
2871
+ "step": 11025
2872
+ },
2873
+ {
2874
+ "epoch": 2.15,
2875
+ "learning_rate": 0.0004591794871794872,
2876
+ "loss": 0.0446,
2877
+ "step": 11050
2878
+ },
2879
+ {
2880
+ "epoch": 2.16,
2881
+ "learning_rate": 0.00045789743589743595,
2882
+ "loss": 0.0476,
2883
+ "step": 11075
2884
+ },
2885
+ {
2886
+ "epoch": 2.16,
2887
+ "learning_rate": 0.0004566153846153846,
2888
+ "loss": 0.0368,
2889
+ "step": 11100
2890
+ },
2891
+ {
2892
+ "epoch": 2.16,
2893
+ "learning_rate": 0.0004553333333333333,
2894
+ "loss": 0.0411,
2895
+ "step": 11125
2896
+ },
2897
+ {
2898
+ "epoch": 2.16,
2899
+ "learning_rate": 0.00045405128205128204,
2900
+ "loss": 0.0349,
2901
+ "step": 11150
2902
+ },
2903
+ {
2904
+ "epoch": 2.16,
2905
+ "learning_rate": 0.00045276923076923077,
2906
+ "loss": 0.0417,
2907
+ "step": 11175
2908
+ },
2909
+ {
2910
+ "epoch": 2.16,
2911
+ "learning_rate": 0.0004514871794871795,
2912
+ "loss": 0.0402,
2913
+ "step": 11200
2914
+ },
2915
+ {
2916
+ "epoch": 2.16,
2917
+ "eval_loss": 0.08717386424541473,
2918
+ "eval_runtime": 217.4573,
2919
+ "eval_samples_per_second": 4.599,
2920
+ "eval_steps_per_second": 0.29,
2921
+ "step": 11200
2922
+ },
2923
+ {
2924
+ "epoch": 2.16,
2925
+ "learning_rate": 0.0004502051282051282,
2926
+ "loss": 0.0464,
2927
+ "step": 11225
2928
+ },
2929
+ {
2930
+ "epoch": 2.16,
2931
+ "learning_rate": 0.00044892307692307696,
2932
+ "loss": 0.0563,
2933
+ "step": 11250
2934
+ },
2935
+ {
2936
+ "epoch": 2.17,
2937
+ "learning_rate": 0.00044764102564102563,
2938
+ "loss": 0.0448,
2939
+ "step": 11275
2940
+ },
2941
+ {
2942
+ "epoch": 2.17,
2943
+ "learning_rate": 0.00044635897435897436,
2944
+ "loss": 0.0525,
2945
+ "step": 11300
2946
+ },
2947
+ {
2948
+ "epoch": 2.17,
2949
+ "learning_rate": 0.0004450769230769231,
2950
+ "loss": 0.0475,
2951
+ "step": 11325
2952
+ },
2953
+ {
2954
+ "epoch": 2.17,
2955
+ "learning_rate": 0.0004437948717948718,
2956
+ "loss": 0.0429,
2957
+ "step": 11350
2958
+ },
2959
+ {
2960
+ "epoch": 2.17,
2961
+ "learning_rate": 0.00044251282051282055,
2962
+ "loss": 0.0523,
2963
+ "step": 11375
2964
+ },
2965
+ {
2966
+ "epoch": 2.17,
2967
+ "learning_rate": 0.0004412307692307692,
2968
+ "loss": 0.0494,
2969
+ "step": 11400
2970
+ },
2971
+ {
2972
+ "epoch": 2.17,
2973
+ "learning_rate": 0.00043994871794871796,
2974
+ "loss": 0.0507,
2975
+ "step": 11425
2976
+ },
2977
+ {
2978
+ "epoch": 2.17,
2979
+ "learning_rate": 0.00043866666666666663,
2980
+ "loss": 0.0486,
2981
+ "step": 11450
2982
+ },
2983
+ {
2984
+ "epoch": 2.18,
2985
+ "learning_rate": 0.00043738461538461536,
2986
+ "loss": 0.0738,
2987
+ "step": 11475
2988
+ },
2989
+ {
2990
+ "epoch": 2.18,
2991
+ "learning_rate": 0.0004361025641025641,
2992
+ "loss": 0.0443,
2993
+ "step": 11500
2994
+ },
2995
+ {
2996
+ "epoch": 2.18,
2997
+ "learning_rate": 0.0004348205128205128,
2998
+ "loss": 0.06,
2999
+ "step": 11525
3000
+ },
3001
+ {
3002
+ "epoch": 2.18,
3003
+ "learning_rate": 0.00043353846153846155,
3004
+ "loss": 0.0438,
3005
+ "step": 11550
3006
+ },
3007
+ {
3008
+ "epoch": 2.18,
3009
+ "learning_rate": 0.0004322564102564103,
3010
+ "loss": 0.0393,
3011
+ "step": 11575
3012
+ },
3013
+ {
3014
+ "epoch": 2.18,
3015
+ "learning_rate": 0.000430974358974359,
3016
+ "loss": 0.0561,
3017
+ "step": 11600
3018
+ },
3019
+ {
3020
+ "epoch": 2.18,
3021
+ "eval_loss": 0.08517900854349136,
3022
+ "eval_runtime": 215.8861,
3023
+ "eval_samples_per_second": 4.632,
3024
+ "eval_steps_per_second": 0.292,
3025
+ "step": 11600
3026
+ },
3027
+ {
3028
+ "epoch": 2.18,
3029
+ "learning_rate": 0.00042969230769230774,
3030
+ "loss": 0.0633,
3031
+ "step": 11625
3032
+ },
3033
+ {
3034
+ "epoch": 2.18,
3035
+ "learning_rate": 0.0004284102564102564,
3036
+ "loss": 0.0937,
3037
+ "step": 11650
3038
+ },
3039
+ {
3040
+ "epoch": 2.19,
3041
+ "learning_rate": 0.00042712820512820515,
3042
+ "loss": 0.0647,
3043
+ "step": 11675
3044
+ },
3045
+ {
3046
+ "epoch": 2.19,
3047
+ "learning_rate": 0.0004258461538461538,
3048
+ "loss": 0.0517,
3049
+ "step": 11700
3050
+ },
3051
+ {
3052
+ "epoch": 2.19,
3053
+ "learning_rate": 0.00042456410256410255,
3054
+ "loss": 0.044,
3055
+ "step": 11725
3056
+ },
3057
+ {
3058
+ "epoch": 2.19,
3059
+ "learning_rate": 0.0004232820512820513,
3060
+ "loss": 0.048,
3061
+ "step": 11750
3062
+ },
3063
+ {
3064
+ "epoch": 2.19,
3065
+ "learning_rate": 0.000422,
3066
+ "loss": 0.0429,
3067
+ "step": 11775
3068
+ },
3069
+ {
3070
+ "epoch": 2.19,
3071
+ "learning_rate": 0.00042071794871794874,
3072
+ "loss": 0.0452,
3073
+ "step": 11800
3074
+ },
3075
+ {
3076
+ "epoch": 2.19,
3077
+ "learning_rate": 0.0004194358974358974,
3078
+ "loss": 0.0535,
3079
+ "step": 11825
3080
+ },
3081
+ {
3082
+ "epoch": 2.19,
3083
+ "learning_rate": 0.00041815384615384615,
3084
+ "loss": 0.049,
3085
+ "step": 11850
3086
+ },
3087
+ {
3088
+ "epoch": 2.2,
3089
+ "learning_rate": 0.0004168717948717949,
3090
+ "loss": 0.0464,
3091
+ "step": 11875
3092
+ },
3093
+ {
3094
+ "epoch": 2.2,
3095
+ "learning_rate": 0.0004155897435897436,
3096
+ "loss": 0.0564,
3097
+ "step": 11900
3098
+ },
3099
+ {
3100
+ "epoch": 2.2,
3101
+ "learning_rate": 0.00041430769230769234,
3102
+ "loss": 0.0673,
3103
+ "step": 11925
3104
+ },
3105
+ {
3106
+ "epoch": 3.0,
3107
+ "learning_rate": 0.00041302564102564107,
3108
+ "loss": 0.0679,
3109
+ "step": 11950
3110
+ },
3111
+ {
3112
+ "epoch": 3.0,
3113
+ "learning_rate": 0.0004117435897435898,
3114
+ "loss": 0.0642,
3115
+ "step": 11975
3116
+ },
3117
+ {
3118
+ "epoch": 3.0,
3119
+ "learning_rate": 0.00041046153846153847,
3120
+ "loss": 0.0664,
3121
+ "step": 12000
3122
+ },
3123
+ {
3124
+ "epoch": 3.0,
3125
+ "eval_loss": 0.10094589740037918,
3126
+ "eval_runtime": 219.2666,
3127
+ "eval_samples_per_second": 4.561,
3128
+ "eval_steps_per_second": 0.287,
3129
+ "step": 12000
3130
+ },
3131
+ {
3132
+ "epoch": 3.0,
3133
+ "learning_rate": 0.00040917948717948715,
3134
+ "loss": 0.0683,
3135
+ "step": 12025
3136
+ },
3137
+ {
3138
+ "epoch": 3.01,
3139
+ "learning_rate": 0.0004078974358974359,
3140
+ "loss": 0.0699,
3141
+ "step": 12050
3142
+ },
3143
+ {
3144
+ "epoch": 3.01,
3145
+ "learning_rate": 0.0004066153846153846,
3146
+ "loss": 0.0645,
3147
+ "step": 12075
3148
+ },
3149
+ {
3150
+ "epoch": 3.01,
3151
+ "learning_rate": 0.00040533333333333334,
3152
+ "loss": 0.0548,
3153
+ "step": 12100
3154
+ },
3155
+ {
3156
+ "epoch": 3.01,
3157
+ "learning_rate": 0.00040405128205128207,
3158
+ "loss": 0.0603,
3159
+ "step": 12125
3160
+ },
3161
+ {
3162
+ "epoch": 3.01,
3163
+ "learning_rate": 0.0004027692307692308,
3164
+ "loss": 0.0552,
3165
+ "step": 12150
3166
+ },
3167
+ {
3168
+ "epoch": 3.01,
3169
+ "learning_rate": 0.0004014871794871795,
3170
+ "loss": 0.0555,
3171
+ "step": 12175
3172
+ },
3173
+ {
3174
+ "epoch": 3.01,
3175
+ "learning_rate": 0.0004002051282051282,
3176
+ "loss": 0.0632,
3177
+ "step": 12200
3178
+ },
3179
+ {
3180
+ "epoch": 3.01,
3181
+ "learning_rate": 0.00039892307692307693,
3182
+ "loss": 0.0572,
3183
+ "step": 12225
3184
+ },
3185
+ {
3186
+ "epoch": 3.02,
3187
+ "learning_rate": 0.00039764102564102566,
3188
+ "loss": 0.0557,
3189
+ "step": 12250
3190
+ },
3191
+ {
3192
+ "epoch": 3.02,
3193
+ "learning_rate": 0.0003963589743589744,
3194
+ "loss": 0.0599,
3195
+ "step": 12275
3196
+ },
3197
+ {
3198
+ "epoch": 3.02,
3199
+ "learning_rate": 0.0003950769230769231,
3200
+ "loss": 0.056,
3201
+ "step": 12300
3202
+ },
3203
+ {
3204
+ "epoch": 3.02,
3205
+ "learning_rate": 0.0003937948717948718,
3206
+ "loss": 0.0583,
3207
+ "step": 12325
3208
+ },
3209
+ {
3210
+ "epoch": 3.02,
3211
+ "learning_rate": 0.00039251282051282053,
3212
+ "loss": 0.0564,
3213
+ "step": 12350
3214
+ },
3215
+ {
3216
+ "epoch": 3.02,
3217
+ "learning_rate": 0.0003912307692307692,
3218
+ "loss": 0.0538,
3219
+ "step": 12375
3220
+ },
3221
+ {
3222
+ "epoch": 3.02,
3223
+ "learning_rate": 0.00038994871794871793,
3224
+ "loss": 0.0525,
3225
+ "step": 12400
3226
+ },
3227
+ {
3228
+ "epoch": 3.02,
3229
+ "eval_loss": 0.13796697556972504,
3230
+ "eval_runtime": 216.1893,
3231
+ "eval_samples_per_second": 4.626,
3232
+ "eval_steps_per_second": 0.291,
3233
+ "step": 12400
3234
+ },
3235
+ {
3236
+ "epoch": 3.02,
3237
+ "learning_rate": 0.00038866666666666666,
3238
+ "loss": 0.0508,
3239
+ "step": 12425
3240
+ },
3241
+ {
3242
+ "epoch": 3.03,
3243
+ "learning_rate": 0.0003873846153846154,
3244
+ "loss": 0.0562,
3245
+ "step": 12450
3246
+ },
3247
+ {
3248
+ "epoch": 3.03,
3249
+ "learning_rate": 0.0003861025641025641,
3250
+ "loss": 0.0489,
3251
+ "step": 12475
3252
+ },
3253
+ {
3254
+ "epoch": 3.03,
3255
+ "learning_rate": 0.00038482051282051285,
3256
+ "loss": 0.0565,
3257
+ "step": 12500
3258
+ },
3259
+ {
3260
+ "epoch": 3.03,
3261
+ "learning_rate": 0.0003835384615384616,
3262
+ "loss": 0.0522,
3263
+ "step": 12525
3264
+ },
3265
+ {
3266
+ "epoch": 3.03,
3267
+ "learning_rate": 0.00038225641025641026,
3268
+ "loss": 0.0534,
3269
+ "step": 12550
3270
+ },
3271
+ {
3272
+ "epoch": 3.03,
3273
+ "learning_rate": 0.000380974358974359,
3274
+ "loss": 0.0539,
3275
+ "step": 12575
3276
+ },
3277
+ {
3278
+ "epoch": 3.03,
3279
+ "learning_rate": 0.0003796923076923077,
3280
+ "loss": 0.0457,
3281
+ "step": 12600
3282
+ },
3283
+ {
3284
+ "epoch": 3.03,
3285
+ "learning_rate": 0.0003784102564102564,
3286
+ "loss": 0.0509,
3287
+ "step": 12625
3288
+ },
3289
+ {
3290
+ "epoch": 3.04,
3291
+ "learning_rate": 0.0003771282051282051,
3292
+ "loss": 0.0512,
3293
+ "step": 12650
3294
+ },
3295
+ {
3296
+ "epoch": 3.04,
3297
+ "learning_rate": 0.00037584615384615385,
3298
+ "loss": 0.043,
3299
+ "step": 12675
3300
+ },
3301
+ {
3302
+ "epoch": 3.04,
3303
+ "learning_rate": 0.0003745641025641026,
3304
+ "loss": 0.0513,
3305
+ "step": 12700
3306
+ },
3307
+ {
3308
+ "epoch": 3.04,
3309
+ "learning_rate": 0.00037328205128205126,
3310
+ "loss": 0.0423,
3311
+ "step": 12725
3312
+ },
3313
+ {
3314
+ "epoch": 3.04,
3315
+ "learning_rate": 0.000372,
3316
+ "loss": 0.0482,
3317
+ "step": 12750
3318
+ },
3319
+ {
3320
+ "epoch": 3.04,
3321
+ "learning_rate": 0.0003707179487179487,
3322
+ "loss": 0.0517,
3323
+ "step": 12775
3324
+ },
3325
+ {
3326
+ "epoch": 3.04,
3327
+ "learning_rate": 0.00036943589743589745,
3328
+ "loss": 0.0414,
3329
+ "step": 12800
3330
+ },
3331
+ {
3332
+ "epoch": 3.04,
3333
+ "eval_loss": 0.1226244568824768,
3334
+ "eval_runtime": 216.9336,
3335
+ "eval_samples_per_second": 4.61,
3336
+ "eval_steps_per_second": 0.29,
3337
+ "step": 12800
3338
+ },
3339
+ {
3340
+ "epoch": 3.04,
3341
+ "learning_rate": 0.0003681538461538462,
3342
+ "loss": 0.0451,
3343
+ "step": 12825
3344
+ },
3345
+ {
3346
+ "epoch": 3.05,
3347
+ "learning_rate": 0.0003668717948717949,
3348
+ "loss": 0.0432,
3349
+ "step": 12850
3350
+ },
3351
+ {
3352
+ "epoch": 3.05,
3353
+ "learning_rate": 0.00036558974358974364,
3354
+ "loss": 0.0428,
3355
+ "step": 12875
3356
+ },
3357
+ {
3358
+ "epoch": 3.05,
3359
+ "learning_rate": 0.0003643076923076923,
3360
+ "loss": 0.0486,
3361
+ "step": 12900
3362
+ },
3363
+ {
3364
+ "epoch": 3.05,
3365
+ "learning_rate": 0.000363025641025641,
3366
+ "loss": 0.0459,
3367
+ "step": 12925
3368
+ },
3369
+ {
3370
+ "epoch": 3.05,
3371
+ "learning_rate": 0.0003617435897435897,
3372
+ "loss": 0.0465,
3373
+ "step": 12950
3374
+ },
3375
+ {
3376
+ "epoch": 3.05,
3377
+ "learning_rate": 0.00036046153846153845,
3378
+ "loss": 0.0417,
3379
+ "step": 12975
3380
+ },
3381
+ {
3382
+ "epoch": 3.05,
3383
+ "learning_rate": 0.0003591794871794872,
3384
+ "loss": 0.0417,
3385
+ "step": 13000
3386
+ },
3387
+ {
3388
+ "epoch": 3.05,
3389
+ "learning_rate": 0.00035794871794871797,
3390
+ "loss": 0.0433,
3391
+ "step": 13025
3392
+ },
3393
+ {
3394
+ "epoch": 3.06,
3395
+ "learning_rate": 0.0003566666666666667,
3396
+ "loss": 0.0494,
3397
+ "step": 13050
3398
+ },
3399
+ {
3400
+ "epoch": 3.06,
3401
+ "learning_rate": 0.00035538461538461543,
3402
+ "loss": 0.0407,
3403
+ "step": 13075
3404
+ },
3405
+ {
3406
+ "epoch": 3.06,
3407
+ "learning_rate": 0.0003541025641025641,
3408
+ "loss": 0.0372,
3409
+ "step": 13100
3410
+ },
3411
+ {
3412
+ "epoch": 3.06,
3413
+ "learning_rate": 0.0003528205128205128,
3414
+ "loss": 0.0455,
3415
+ "step": 13125
3416
+ },
3417
+ {
3418
+ "epoch": 3.06,
3419
+ "learning_rate": 0.0003515384615384615,
3420
+ "loss": 0.0375,
3421
+ "step": 13150
3422
+ },
3423
+ {
3424
+ "epoch": 3.06,
3425
+ "learning_rate": 0.00035025641025641024,
3426
+ "loss": 0.0435,
3427
+ "step": 13175
3428
+ },
3429
+ {
3430
+ "epoch": 3.06,
3431
+ "learning_rate": 0.00034897435897435897,
3432
+ "loss": 0.0437,
3433
+ "step": 13200
3434
+ },
3435
+ {
3436
+ "epoch": 3.06,
3437
+ "eval_loss": 0.1415424644947052,
3438
+ "eval_runtime": 215.4136,
3439
+ "eval_samples_per_second": 4.642,
3440
+ "eval_steps_per_second": 0.292,
3441
+ "step": 13200
3442
+ },
3443
+ {
3444
+ "epoch": 3.06,
3445
+ "learning_rate": 0.0003476923076923077,
3446
+ "loss": 0.0432,
3447
+ "step": 13225
3448
+ },
3449
+ {
3450
+ "epoch": 3.07,
3451
+ "learning_rate": 0.00034641025641025643,
3452
+ "loss": 0.0393,
3453
+ "step": 13250
3454
+ },
3455
+ {
3456
+ "epoch": 3.07,
3457
+ "learning_rate": 0.00034512820512820516,
3458
+ "loss": 0.0446,
3459
+ "step": 13275
3460
+ },
3461
+ {
3462
+ "epoch": 3.07,
3463
+ "learning_rate": 0.0003438461538461539,
3464
+ "loss": 0.0417,
3465
+ "step": 13300
3466
+ },
3467
+ {
3468
+ "epoch": 3.07,
3469
+ "learning_rate": 0.00034256410256410256,
3470
+ "loss": 0.0386,
3471
+ "step": 13325
3472
+ },
3473
+ {
3474
+ "epoch": 3.07,
3475
+ "learning_rate": 0.0003412820512820513,
3476
+ "loss": 0.0395,
3477
+ "step": 13350
3478
+ },
3479
+ {
3480
+ "epoch": 3.07,
3481
+ "learning_rate": 0.00034,
3482
+ "loss": 0.0394,
3483
+ "step": 13375
3484
+ },
3485
+ {
3486
+ "epoch": 3.07,
3487
+ "learning_rate": 0.0003387179487179487,
3488
+ "loss": 0.0364,
3489
+ "step": 13400
3490
+ },
3491
+ {
3492
+ "epoch": 3.07,
3493
+ "learning_rate": 0.00033743589743589743,
3494
+ "loss": 0.0334,
3495
+ "step": 13425
3496
+ },
3497
+ {
3498
+ "epoch": 3.08,
3499
+ "learning_rate": 0.00033615384615384616,
3500
+ "loss": 0.0374,
3501
+ "step": 13450
3502
+ },
3503
+ {
3504
+ "epoch": 3.08,
3505
+ "learning_rate": 0.0003348717948717949,
3506
+ "loss": 0.0352,
3507
+ "step": 13475
3508
+ },
3509
+ {
3510
+ "epoch": 3.08,
3511
+ "learning_rate": 0.00033358974358974357,
3512
+ "loss": 0.0317,
3513
+ "step": 13500
3514
+ },
3515
+ {
3516
+ "epoch": 3.08,
3517
+ "learning_rate": 0.0003323076923076923,
3518
+ "loss": 0.0427,
3519
+ "step": 13525
3520
+ },
3521
+ {
3522
+ "epoch": 3.08,
3523
+ "learning_rate": 0.000331025641025641,
3524
+ "loss": 0.038,
3525
+ "step": 13550
3526
+ },
3527
+ {
3528
+ "epoch": 3.08,
3529
+ "learning_rate": 0.00032974358974358976,
3530
+ "loss": 0.0414,
3531
+ "step": 13575
3532
+ },
3533
+ {
3534
+ "epoch": 3.08,
3535
+ "learning_rate": 0.0003284615384615385,
3536
+ "loss": 0.0368,
3537
+ "step": 13600
3538
+ },
3539
+ {
3540
+ "epoch": 3.08,
3541
+ "eval_loss": 0.08724867552518845,
3542
+ "eval_runtime": 216.1216,
3543
+ "eval_samples_per_second": 4.627,
3544
+ "eval_steps_per_second": 0.292,
3545
+ "step": 13600
3546
+ },
3547
+ {
3548
+ "epoch": 3.08,
3549
+ "learning_rate": 0.0003271794871794872,
3550
+ "loss": 0.0357,
3551
+ "step": 13625
3552
+ },
3553
+ {
3554
+ "epoch": 3.09,
3555
+ "learning_rate": 0.00032589743589743594,
3556
+ "loss": 0.0337,
3557
+ "step": 13650
3558
+ },
3559
+ {
3560
+ "epoch": 3.09,
3561
+ "learning_rate": 0.0003246153846153846,
3562
+ "loss": 0.0337,
3563
+ "step": 13675
3564
+ },
3565
+ {
3566
+ "epoch": 3.09,
3567
+ "learning_rate": 0.0003233333333333333,
3568
+ "loss": 0.033,
3569
+ "step": 13700
3570
+ },
3571
+ {
3572
+ "epoch": 3.09,
3573
+ "learning_rate": 0.000322051282051282,
3574
+ "loss": 0.0326,
3575
+ "step": 13725
3576
+ },
3577
+ {
3578
+ "epoch": 3.09,
3579
+ "learning_rate": 0.00032076923076923076,
3580
+ "loss": 0.0434,
3581
+ "step": 13750
3582
+ },
3583
+ {
3584
+ "epoch": 3.09,
3585
+ "learning_rate": 0.0003194871794871795,
3586
+ "loss": 0.0365,
3587
+ "step": 13775
3588
+ },
3589
+ {
3590
+ "epoch": 3.09,
3591
+ "learning_rate": 0.0003182051282051282,
3592
+ "loss": 0.0327,
3593
+ "step": 13800
3594
+ },
3595
+ {
3596
+ "epoch": 3.09,
3597
+ "learning_rate": 0.00031692307692307695,
3598
+ "loss": 0.0324,
3599
+ "step": 13825
3600
+ },
3601
+ {
3602
+ "epoch": 3.1,
3603
+ "learning_rate": 0.0003156410256410256,
3604
+ "loss": 0.038,
3605
+ "step": 13850
3606
+ },
3607
+ {
3608
+ "epoch": 3.1,
3609
+ "learning_rate": 0.00031435897435897435,
3610
+ "loss": 0.0379,
3611
+ "step": 13875
3612
+ },
3613
+ {
3614
+ "epoch": 3.1,
3615
+ "learning_rate": 0.0003130769230769231,
3616
+ "loss": 0.0409,
3617
+ "step": 13900
3618
+ },
3619
+ {
3620
+ "epoch": 3.1,
3621
+ "learning_rate": 0.0003117948717948718,
3622
+ "loss": 0.0391,
3623
+ "step": 13925
3624
+ },
3625
+ {
3626
+ "epoch": 3.1,
3627
+ "learning_rate": 0.00031051282051282054,
3628
+ "loss": 0.036,
3629
+ "step": 13950
3630
+ },
3631
+ {
3632
+ "epoch": 3.1,
3633
+ "learning_rate": 0.00030923076923076927,
3634
+ "loss": 0.038,
3635
+ "step": 13975
3636
+ },
3637
+ {
3638
+ "epoch": 3.1,
3639
+ "learning_rate": 0.000307948717948718,
3640
+ "loss": 0.0311,
3641
+ "step": 14000
3642
+ },
3643
+ {
3644
+ "epoch": 3.1,
3645
+ "eval_loss": 0.08812109380960464,
3646
+ "eval_runtime": 216.1669,
3647
+ "eval_samples_per_second": 4.626,
3648
+ "eval_steps_per_second": 0.291,
3649
+ "step": 14000
3650
+ }
3651
+ ],
3652
+ "logging_steps": 25,
3653
+ "max_steps": 20000,
3654
+ "num_train_epochs": 9223372036854775807,
3655
+ "save_steps": 400,
3656
+ "total_flos": 1.441894654420992e+21,
3657
+ "trial_name": null,
3658
+ "trial_params": null
3659
+ }
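The log above is the standard `trainer_state.json` written by the Hugging Face Transformers `Trainer`: training entries carry `loss`, `learning_rate` and `step`, evaluation entries carry `eval_loss` plus runtime statistics, and `logging_steps`, `save_steps` and `max_steps` are recorded at the end. As a minimal sketch (not part of this commit, and assuming the file has been downloaded locally as `trainer_state.json`), it can be read back with nothing but the standard library to inspect the loss curve:

```python
import json

# Load the trainer state uploaded in this commit (assumed downloaded locally).
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two kinds of entries: training logs (with "loss") and
# evaluation logs (with "eval_loss"); both are keyed by the global "step".
train_logs = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_logs = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"logging_steps={state['logging_steps']}, "
      f"save_steps={state['save_steps']}, max_steps={state['max_steps']}")
print("last training loss (step, value):", train_logs[-1])
print("best eval loss (step, value):", min(eval_logs, key=lambda x: x[1]))
```

The same two lists can be fed straight into any plotting library to compare how the evaluation loss tracks the training loss across checkpoints.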
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2fe4fdcfbb247cbf779e1bf2e5ee28a0a2bc5aca1b3d9101106284866a837ee6
3
+ size 4219
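`training_args.bin` is stored through Git LFS, so only the pointer is shown above. As an assumption rather than anything documented in this commit, it is normally the pickled `TrainingArguments`/`Seq2SeqTrainingArguments` object saved by the `Trainer`; once the LFS file has been pulled locally, a sketch like the following can usually inspect it (requires `transformers` to be importable, and `weights_only=False` on recent PyTorch since the file is a full pickled object):

```python
import torch

# Hypothetical inspection of the pickled training arguments (assumes the LFS
# file has been downloaded locally); weights_only=False allows unpickling the
# full object on PyTorch versions where weights_only defaults to True.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.learning_rate, args.max_steps, args.save_steps)
```

If the assumption holds, these values should line up with the `max_steps` and `save_steps` fields recorded in `trainer_state.json` above.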
vocab.json ADDED
The diff for this file is too large to render. See raw diff