mjbuehler committed on
Commit 9e2e718
1 Parent(s): a6ae9e1

Upload 41 files

Files changed (41)
  1. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_info.txt +428 -0
  2. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_10.pt +3 -0
  3. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_12.pt +3 -0
  4. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_14.pt +3 -0
  5. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_16.pt +3 -0
  6. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_18.pt +3 -0
  7. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_2.pt +3 -0
  8. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_20.pt +3 -0
  9. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_22.pt +3 -0
  10. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_24.pt +3 -0
  11. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_26.pt +3 -0
  12. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_28.pt +3 -0
  13. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_30.pt +3 -0
  14. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_32.pt +3 -0
  15. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_34.pt +3 -0
  16. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_36.pt +3 -0
  17. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_38.pt +3 -0
  18. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_4.pt +3 -0
  19. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_40.pt +3 -0
  20. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_42.pt +3 -0
  21. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_44.pt +3 -0
  22. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_46.pt +3 -0
  23. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_48.pt +3 -0
  24. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_50.pt +3 -0
  25. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_6.pt +3 -0
  26. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_8.pt +3 -0
  27. 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/loss_data.csv +0 -0
  28. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_info.txt +428 -0
  29. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_18.pt +3 -0
  30. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_24.pt +3 -0
  31. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_26.pt +3 -0
  32. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_30.pt +3 -0
  33. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_32.pt +3 -0
  34. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_34.pt +3 -0
  35. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_40.pt +3 -0
  36. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_42.pt +3 -0
  37. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_44.pt +3 -0
  38. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_48.pt +3 -0
  39. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_50.pt +3 -0
  40. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_8.pt +3 -0
  41. 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/loss_data.csv +0 -0
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_info.txt ADDED
@@ -0,0 +1,428 @@
+ Model Name: 07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog
+ Model Created @ 2024-07-23 10-37-31 Eastern Time
+ Number of trainable parameters: 12735744
+ Model Architecture:
+ AutoregressiveWrapper(
+   (net): TransformerWrapper(
+     (token_emb): TokenEmbedding(
+       (emb): Embedding(256, 256)
+     )
+     (post_emb_norm): Identity()
+     (emb_dropout): Dropout(p=0.0, inplace=False)
+     (project_emb): Identity()
+     (attn_layers): Decoder(
+       (layers): ModuleList(
+         (0): ModuleList(
+           (0): ModuleList(
+             (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+             (1-2): 2 x None
+           )
+           (1): Attention(
+             (to_q): Linear(in_features=256, out_features=512, bias=False)
+             (to_k): Linear(in_features=256, out_features=512, bias=False)
+             (to_v): Linear(in_features=256, out_features=512, bias=False)
+             (attend): Attend(
+               (attn_dropout): Dropout(p=0.0, inplace=False)
+             )
+             (to_out): Linear(in_features=512, out_features=256, bias=False)
+           )
+           (2): Residual()
+         )
+         (1): ModuleList(
+           (0): ModuleList(
+             (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+             (1-2): 2 x None
+           )
+           (1): FeedForward(
+             (ff): Sequential(
+               (0): Sequential(
+                 (0): Linear(in_features=256, out_features=1024, bias=True)
+                 (1): GELU(approximate='none')
+               )
+               (1): Dropout(p=0.0, inplace=False)
+               (2): Linear(in_features=1024, out_features=256, bias=True)
+             )
+           )
+           (2): Residual()
+         )
+         (2)-(23): eleven more copies of the same alternating Attention / FeedForward pair as layers (0)-(1)
+       )
+       (rotary_pos_emb): RotaryEmbedding()
+       (final_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+     )
+     (to_logits): Linear(in_features=256, out_features=256, bias=False)
+   )
+ )
+ Model Parameters:
+ num_tokens: 256
+ max_seq_len: 2071
+ dim: 256
+ depth: 12
+ heads: 8
+ attn_dim_head: 64
+ rotary_pos_emb: True
+ attn_flash: True
+
+ Note: July 22 - This model tests whether or not masking affects the outcome of the model. We are still using a broad-entropy / homogeneous training set for this run.
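The class names in the dump above (TransformerWrapper, Decoder, AutoregressiveWrapper, Attend) match lucidrains' x-transformers package, and the "Model Parameters" block maps one-to-one onto its constructor arguments. As a minimal sketch, assuming x-transformers is indeed the library used here, a model with this repr could be rebuilt like so:

    import torch
    from x_transformers import TransformerWrapper, Decoder, AutoregressiveWrapper

    # Hyperparameters copied from the "Model Parameters" block above.
    net = TransformerWrapper(
        num_tokens=256,
        max_seq_len=2071,
        attn_layers=Decoder(
            dim=256,
            depth=12,           # appears as 12 Attention + 12 FeedForward sublayers
            heads=8,
            attn_dim_head=64,   # 8 heads x 64 dims -> the 512-wide q/k/v projections
            rotary_pos_emb=True,
            attn_flash=True,
        ),
    )
    model = AutoregressiveWrapper(net)

    # Sanity check against the header: roughly 12.7M trainable parameters.
    print(sum(p.numel() for p in model.parameters() if p.requires_grad))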
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_10.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0a7404efe04f04e8de78e7b9b4851594b9f0a73f5a199382f6be44ff941ae2c
+ size 51000826
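Each checkpoint entry in this commit is a Git LFS pointer (a version line, a sha256 oid, and a byte size) rather than the ~51 MB weights themselves; the real file is fetched with `git lfs pull`. A hedged sketch of restoring one checkpoint into the model built above, assuming each .pt file holds a plain state dict for the AutoregressiveWrapper:

    import torch

    # Run `git lfs pull` first so the .pt file contains weights, not pointer text.
    state = torch.load("Conway_GPT_v7_epoch_10.pt", map_location="cpu")
    # Assumption: a bare state_dict; if the file wraps it (e.g. under a "model"
    # key), unpack that key before loading.
    model.load_state_dict(state)
    model.eval()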
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_12.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f53b0ce18b220866010134b8b2e60f91ca95b2ade0aafaff988c73b4173bd13f
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_14.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d80b7df6c2515b89aa9f250d33060994e2c3bc5c32a77918e27f957d5b164b87
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_16.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:434f8dccf0fd0d71e10c91f7b4146004fa0018e50b2e4f5cc4cbbe009f98a516
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_18.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9be1ff42c135741e832f028bc4ce8d86eef4db5fa5dd434c47979266c1dcb4ca
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89db1da561f6f9bce4f296f71507311516dd0074aa9fa1b0dd0bcea7826b2f9b
+ size 51000634
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_20.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e6537fdf571aa0d1e8c487af07fad7fa61a352f781b5f393a37c4489cc4cd26
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_22.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53265a179b8e767d34253f16f119e1098d862ffdc5e2b472766dbe9c0739e84f
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_24.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd0d58a56df6e1f189d1ce75cc6859298a2692d3747751308986892b312ddb30
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_26.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:151383dea7ee7aade68fc3b529e37564ce6f83d77c9e51d892aea25cef557f27
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_28.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d06bd0a7ca00ccf37447aeb800d12f082124b0cd4411afd3338689ac06f8b362
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_30.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:495dbeb6eee67a18c57b0464d79b44e752a4b2af50df007e97288f5f6ad0dc0b
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d8524ad6b2139d38095d7c01387285226c99d7e5d773dc37db0961d398d92cf
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_34.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9fc286f03bdea264f7762b668d555df62696411e3f0f7766adf3e20580a51bb
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_36.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4dee45d28cc8fbecfc84c69c43197ea1a0fd8589132e786b1a8cab5a55a42a31
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_38.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99c43c5627e61e995424e3c50359b918192bc9e8b27586fd7cd5fb2739719adc
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_4.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:839c244713f259a4de2779a6f4b0b6b2ff8d8e1a48ca03653110435403f58e8f
+ size 51000634
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_40.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9758032867d77997a2e625e85c103f6ad8e1cbdeeca76681b842463a447c1550
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_42.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea6fccf32571daab0c04f40309dffc82d43b73d4886482d7b63976814ca4a8f7
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_44.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da3467dbf4f796df323f17d878cb1463132d6176b005d8b9e7aa1b22038f5fb2
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_46.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e80f6f9648014a6064a4f9178cc2160121a5390d168b0ea237efa8255bc2a36
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_48.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57443942053f3e04b173e0492e5152782e2391758af90b342cbd7ad8808f666b
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_50.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43ff077757252cfc7485cdb2c71cf8f661ebcbde2ddde5d647587e30930ccfc3
+ size 51000826
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_6.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16fb2e4381d1748d7503c2181f876974ba4e0f3f1baf4f3bf44b8a4b4ffe4537
+ size 51000634
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/Conway_GPT_v7_epoch_8.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:32da1a08333ae6f53b402443066bb6f4b7d33805780ee958c464414c714d0e70
+ size 51000634
07_22_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_On_Broad_Entrpoy_Homog_2024-07-23 10-37-31/loss_data.csv ADDED
The diff for this file is too large to render.
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_info.txt ADDED
@@ -0,0 +1,428 @@
+ Model Name: 08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog
+ Model Created @ 2024-08-14 12-24-10 Eastern Time
+ Number of trainable parameters: 12735744
+ Model Architecture:
+ AutoregressiveWrapper(
+   (net): TransformerWrapper(
+     (token_emb): TokenEmbedding(
+       (emb): Embedding(256, 256)
+     )
+     (post_emb_norm): Identity()
+     (emb_dropout): Dropout(p=0.0, inplace=False)
+     (project_emb): Identity()
+     (attn_layers): Decoder(
+       (layers): ModuleList(
+         (0): ModuleList(
+           (0): ModuleList(
+             (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+             (1-2): 2 x None
+           )
+           (1): Attention(
+             (to_q): Linear(in_features=256, out_features=512, bias=False)
+             (to_k): Linear(in_features=256, out_features=512, bias=False)
+             (to_v): Linear(in_features=256, out_features=512, bias=False)
+             (attend): Attend(
+               (attn_dropout): Dropout(p=0.0, inplace=False)
+             )
+             (to_out): Linear(in_features=512, out_features=256, bias=False)
+           )
+           (2): Residual()
+         )
+         (1): ModuleList(
+           (0): ModuleList(
+             (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+             (1-2): 2 x None
+           )
+           (1): FeedForward(
+             (ff): Sequential(
+               (0): Sequential(
+                 (0): Linear(in_features=256, out_features=1024, bias=True)
+                 (1): GELU(approximate='none')
+               )
+               (1): Dropout(p=0.0, inplace=False)
+               (2): Linear(in_features=1024, out_features=256, bias=True)
+             )
+           )
+           (2): Residual()
+         )
+         (2)-(23): eleven more copies of the same alternating Attention / FeedForward pair as layers (0)-(1)
+       )
+       (rotary_pos_emb): RotaryEmbedding()
+       (final_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True)
+     )
+     (to_logits): Linear(in_features=256, out_features=256, bias=False)
+   )
+ )
+ Model Parameters:
+ num_tokens: 256
+ max_seq_len: 2071
+ dim: 256
+ depth: 12
+ heads: 8
+ attn_dim_head: 64
+ rotary_pos_emb: True
+ attn_flash: True
+
+ Note: Aug 14, 2024 - This model is a test using rotary positional encoding, no extra masking, and high-entropy data on a 32 by 32 toroidal grid. Poor performance on the test set is anticipated.
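The note above concerns Conway's Game of Life on a 32 by 32 toroidal (wrap-around) grid. For reference, one generation of the ground-truth update rule the model is trained to imitate can be computed with wrapped neighbor sums; a short NumPy sketch (only the grid size comes from the note, the rest is the standard rule):

    import numpy as np

    def conway_step(grid: np.ndarray) -> np.ndarray:
        # Sum the eight neighbors; np.roll gives the toroidal wrap-around.
        neighbors = sum(
            np.roll(np.roll(grid, dy, axis=0), dx, axis=1)
            for dy in (-1, 0, 1)
            for dx in (-1, 0, 1)
            if (dy, dx) != (0, 0)
        )
        # Standard rule: a cell is alive next step with exactly 3 live
        # neighbors, or with 2 live neighbors if it is already alive.
        return ((neighbors == 3) | ((grid == 1) & (neighbors == 2))).astype(grid.dtype)

    rng = np.random.default_rng(0)
    grid = (rng.random((32, 32)) < 0.5).astype(np.int8)  # high-entropy random start
    next_grid = conway_step(grid)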
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_18.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f31cc4e972c4c39401bab9141abb0ec759ac3ce7c7926fcc6a0688347cbbc86a
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_24.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7d8eed1245a57d1f1cd750c14d563b74e37dcc89a6a7851d9bd8007673b6a35
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_26.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4ef11b905f81f1d4281e710675fafeaf717a21adfb829089ff9f2f4f21491a3
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_30.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3dcf2219da1458017aa9cf0f744e17523297d9332666512e4d36a1e3c5740e8e
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3db6059b9acd53e96d997791d532ff4fe0486976fbb90f71db840f3c93f90cf1
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_34.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:605b45658b0597c53f206c240a9b1c2d5225c8cc4f95f8591f85de35bd3ef367
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_40.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e1c1256d5edc4660a9c2437cb87463d45c938cdacc211502bf30f79ab9fbf62
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_42.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16af24853c96b9a2af347f77ca7263f17ce8c8fcc1c43ca380749f70b0ebc164
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_44.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e263f49bba8cde18936615c2963908d08cff4fdf10133047dc815fd7c421bd2d
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_48.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc2ea470325b9ef1cc52d89759a1c32bc0f33e347b9feb4366fc6c31410e6394
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_50.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2bce33853b2ae2ec3e35eff23c7472337e5a45f95065cd668d26edc5d5eeeef
+ size 51000826
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/Conway_GPT_v7_epoch_8.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa8331e5ee3a744705cd0b2cece4d994e3a32c812a04f5b8d7d17c0dbbe470ee
+ size 51000634
08_14_2024_Conway_2_State_Jump_Rot_Pos_On_Masking_Off_High_Entrpoy_Homog_2024-08-14 12-24-10/loss_data.csv ADDED
The diff for this file is too large to render.