kmfoda committed (verified)
Commit 69b7910 · 1 Parent(s): f901945

Run 3. Outer Step 1. Inner Step 0. Peers 30.

Files changed (4)
  1. config.json +21 -21
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
  4. outer_optimizer.pt +1 -1
config.json CHANGED
@@ -1,12 +1,12 @@
 {
-  "_name_or_path": "distributed/optimized-gpt2-1b",
+  "_name_or_path": "kmfoda/gpt2-1b-miner-1",
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "NON_PARTICIPATING",
-    "1": "NON_PARTICIPATING",
+    "1": "SUCCESS",
     "10": "NON_PARTICIPATING",
     "100": "NON_PARTICIPATING",
-    "101": "SUCCESS",
+    "101": "NON_PARTICIPATING",
     "102": "SUCCESS",
     "103": "NON_PARTICIPATING",
     "104": "NON_PARTICIPATING",
@@ -14,7 +14,7 @@
     "106": "NON_PARTICIPATING",
     "107": "NON_PARTICIPATING",
     "108": "NON_PARTICIPATING",
-    "109": "NON_PARTICIPATING",
+    "109": "SUCCESS",
     "11": "NON_PARTICIPATING",
     "110": "NON_PARTICIPATING",
     "111": "NON_PARTICIPATING",
@@ -23,7 +23,7 @@
     "114": "NON_PARTICIPATING",
     "115": "NON_PARTICIPATING",
     "116": "NON_PARTICIPATING",
-    "117": "SUCCESS",
+    "117": "NON_PARTICIPATING",
     "118": "NON_PARTICIPATING",
     "119": "NON_PARTICIPATING",
     "12": "NON_PARTICIPATING",
@@ -77,7 +77,7 @@
     "163": "NON_PARTICIPATING",
     "164": "SUCCESS",
     "165": "NON_PARTICIPATING",
-    "166": "NON_PARTICIPATING",
+    "166": "SUCCESS",
     "167": "NON_PARTICIPATING",
     "168": "NON_PARTICIPATING",
     "169": "NON_PARTICIPATING",
@@ -89,7 +89,7 @@
     "174": "NON_PARTICIPATING",
     "175": "NON_PARTICIPATING",
     "176": "NON_PARTICIPATING",
-    "177": "SUCCESS",
+    "177": "NON_PARTICIPATING",
     "178": "NON_PARTICIPATING",
     "179": "NON_PARTICIPATING",
     "18": "NON_PARTICIPATING",
@@ -115,8 +115,8 @@
     "198": "NON_PARTICIPATING",
     "199": "NON_PARTICIPATING",
     "2": "NON_PARTICIPATING",
-    "20": "NON_PARTICIPATING",
-    "200": "NON_PARTICIPATING",
+    "20": "SUCCESS",
+    "200": "SUCCESS",
     "201": "NON_PARTICIPATING",
     "202": "NON_PARTICIPATING",
     "203": "NON_PARTICIPATING",
@@ -141,10 +141,10 @@
     "220": "NON_PARTICIPATING",
     "221": "SUCCESS",
     "222": "NON_PARTICIPATING",
-    "223": "FAIL",
+    "223": "SUCCESS",
     "224": "NON_PARTICIPATING",
-    "225": "NON_PARTICIPATING",
-    "226": "SUCCESS",
+    "225": "SUCCESS",
+    "226": "NON_PARTICIPATING",
     "227": "NON_PARTICIPATING",
     "228": "NON_PARTICIPATING",
     "229": "NON_PARTICIPATING",
@@ -184,11 +184,11 @@
     "3": "NON_PARTICIPATING",
     "30": "NON_PARTICIPATING",
     "31": "NON_PARTICIPATING",
-    "32": "NON_PARTICIPATING",
+    "32": "SUCCESS",
     "33": "NON_PARTICIPATING",
     "34": "NON_PARTICIPATING",
     "35": "NON_PARTICIPATING",
-    "36": "NON_PARTICIPATING",
+    "36": "SUCCESS",
     "37": "NON_PARTICIPATING",
     "38": "NON_PARTICIPATING",
     "39": "NON_PARTICIPATING",
@@ -212,17 +212,17 @@
     "55": "NON_PARTICIPATING",
     "56": "NON_PARTICIPATING",
     "57": "NON_PARTICIPATING",
-    "58": "SUCCESS",
+    "58": "NON_PARTICIPATING",
     "59": "NON_PARTICIPATING",
     "6": "NON_PARTICIPATING",
     "60": "NON_PARTICIPATING",
     "61": "NON_PARTICIPATING",
-    "62": "NON_PARTICIPATING",
+    "62": "SUCCESS",
     "63": "NON_PARTICIPATING",
-    "64": "NON_PARTICIPATING",
+    "64": "SUCCESS",
     "65": "NON_PARTICIPATING",
     "66": "NON_PARTICIPATING",
-    "67": "NON_PARTICIPATING",
+    "67": "SUCCESS",
     "68": "NON_PARTICIPATING",
     "69": "NON_PARTICIPATING",
     "7": "NON_PARTICIPATING",
@@ -238,7 +238,7 @@
     "79": "NON_PARTICIPATING",
     "8": "NON_PARTICIPATING",
     "80": "SUCCESS",
-    "81": "NON_PARTICIPATING",
+    "81": "SUCCESS",
     "82": "NON_PARTICIPATING",
     "83": "NON_PARTICIPATING",
     "84": "NON_PARTICIPATING",
@@ -247,7 +247,7 @@
     "87": "NON_PARTICIPATING",
     "88": "NON_PARTICIPATING",
     "89": "NON_PARTICIPATING",
-    "9": "NON_PARTICIPATING",
+    "9": "SUCCESS",
     "90": "NON_PARTICIPATING",
     "91": "NON_PARTICIPATING",
     "92": "NON_PARTICIPATING",
@@ -275,7 +275,7 @@
   "initializer_range": 0.02,
   "inner_step": 0,
   "inner_steps": 0,
-  "last_allreduce_block": 5363566,
+  "last_allreduce_block": 5366095,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:50852309c11bba1c68b1fc599de303950b01245d3f8e70c6ec061ebf1229b0c3
+oid sha256:b7debebcd6091553c4fc865a538a768e96dd8cc8a2e3e2ec4737f397ffa275db
 size 2944
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:90b3e428e98fa30e20e42438a9da3f27f84d804d2b9741a9c840c0dcdc0d9ae6
+oid sha256:35effb7859e9e12b09ef36c9a7703d1e5590acdbc9496d16917dfab7a37957a0
 size 4040701744
outer_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6e93dc7617a37533af9fd53bd75e69a0272fe35fdea96597fc44b10deaf7c748
+oid sha256:1901361aab2440a81da5873a57484f0477cd2f96e47f756842f80c55059ff417
 size 4040805354
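
Note: the three entries above are Git LFS pointers, so the repository only versions the oid sha256 and size lines while the binary blobs live in LFS storage. Below is a short sketch for checking fetched blobs against the new pointers; it assumes the files have already been downloaded (e.g. via git lfs pull or huggingface_hub) into the working directory.

import hashlib
import os

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file so multi-GB checkpoints do not have to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid/size values copied from the new (+) pointer lines in this commit.
expected = {
    "inner_optimizer.pt": ("b7debebcd6091553c4fc865a538a768e96dd8cc8a2e3e2ec4737f397ffa275db", 2944),
    "model.safetensors": ("35effb7859e9e12b09ef36c9a7703d1e5590acdbc9496d16917dfab7a37957a0", 4040701744),
    "outer_optimizer.pt": ("1901361aab2440a81da5873a57484f0477cd2f96e47f756842f80c55059ff417", 4040805354),
}

for name, (oid, size) in expected.items():
    ok = os.path.getsize(name) == size and sha256_of(name) == oid
    print(f"{name}: {'OK' if ok else 'MISMATCH'}")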