Fraser-Greenlee committed
Commit
d5a3cb1
1 Parent(s): 17dcc3a
make_variations/generate_with_codeT5.ipynb CHANGED
@@ -30,7 +30,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 18,
+ "execution_count": 58,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -52,7 +52,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 19,
+ "execution_count": 59,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -74,7 +74,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 20,
+ "execution_count": 60,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -84,7 +84,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 21,
+ "execution_count": 61,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -106,7 +106,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 22,
+ "execution_count": 62,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -118,7 +118,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 23,
+ "execution_count": 63,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -152,7 +152,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 37,
+ "execution_count": 64,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -168,10 +168,11 @@
  },
  {
  "cell_type": "code",
- "execution_count": 38,
+ "execution_count": 65,
  "metadata": {},
  "outputs": [],
  "source": [
+ "@timeout(seconds=3)\n",
  "def trace_code(start_state: str, code: str):\n",
  "    state = {}\n",
  "    try:\n",
@@ -188,7 +189,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 39,
+ "execution_count": 66,
  "metadata": {},
  "outputs": [
  {
@@ -223,7 +224,7 @@
  " 'end': 'g = 200; i = 1; l = [100, 100, 100, 0, 0]'}]"
  ]
  },
- "execution_count": 39,
+ "execution_count": 66,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -258,7 +259,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 46,
+ "execution_count": 67,
  "metadata": {},
  "outputs": [
  {
@@ -266,53 +267,54 @@
  "text/plain": [
  "(['g = 100', 'i = 1'],\n",
  " ['l = [1, 2]',\n",
- " 'l = [0,1,2,3,3,4,5,6,9]',\n",
- " 'l = [0.01]',\n",
- " 'l = [5, 6, 8, 12,]',\n",
- " 'l = [g * 2, 1]',\n",
- " 'l = [g / 100.0 + i]',\n",
- " 'l = [100, 100,]',\n",
- " 'l = [0, 1]',\n",
+ " 'l = [g, i, j]',\n",
+ " 'l = [i,g]',\n",
+ " 'l = [k, j, k2]',\n",
+ " 'l = [1.0, 0.01, 0.01, 0.01]',\n",
+ " 'l = [k, j]',\n",
+ " 'l = [j]',\n",
+ " 'l = [r, t, d]',\n",
+ " 'l = [g, i, l]',\n",
  " 'l = [1]',\n",
- " 'l = [15, 100, 1000100, i,]',\n",
- " 'l = [100, 1, 0]',\n",
- " 'l = [i, m]',\n",
- " 'l = [.1,.2]',\n",
- " 'l = [100100]',\n",
- " 'l = [100, 100,100]',\n",
- " 'l = [1, 2, 3, 4]',\n",
- " 'l = [0.001, 0.001, 0.001, 1.001]',\n",
- " 'l = [100, 100, 100]',\n",
- " 'l = [0.9]',\n",
- " 'l = [1, 2, 3]',\n",
- " 'l = [g / i]',\n",
+ " 'l = [l]',\n",
+ " 'l = [i, 1]',\n",
+ " 'l = [g + h*g + i*i]',\n",
+ " 'l = [g, i, 1]',\n",
+ " 'l = [b[i], b [ j ]]',\n",
+ " 'l = [2, 3, 3,]',\n",
+ " 'l = [a[g, e, c]]',\n",
+ " 'l = [b [ a ] [b[3]]]',\n",
+ " 'l = [g - 1, i]',\n",
+ " 'l = [2]',\n",
+ " 'l = [5]',\n",
+ " 'l = [6, 5, 3, 2]',\n",
+ " 'l = [b[g], b[i], b[g]]',\n",
+ " 'l = [b[i][j]]',\n",
+ " 'l = [c[j ], c[j+1 ]]',\n",
+ " 'l = [i, g * g]',\n",
  " 'l = [g]',\n",
- " 'l = [i - 1]',\n",
+ " 'l = [g, i, f]',\n",
+ " 'l = [a [ i ]]',\n",
  " 'l = [1, 1, 1]',\n",
- " 'l = [10, 20]',\n",
- " 'l = [0, 2, 3]',\n",
- " 'l = [100]',\n",
- " 'l = [1, 1, 2]',\n",
- " 'l = [10109090909090909090909090909090909]',\n",
+ " 'l = [1, 4, 4]',\n",
+ " 'l = [b [j ]]',\n",
  " 'l = [g, i]',\n",
- " 'l = [1, 2,2]',\n",
- " 'l = [0, 0]',\n",
- " 'l = [10, 20, 20]',\n",
+ " 'l = [1, 0, 0]',\n",
+ " 'l = [i, l]',\n",
+ " 'l = [0.0]',\n",
  " 'l = [i]',\n",
- " 'l = [g, 1]',\n",
- " 'l = [0, 1, 0]',\n",
- " 'l = [100, 90]',\n",
- " 'l = [10, 5, 6]',\n",
- " 'l = [g, g-i, l]',\n",
+ " 'l = [g, i, 0]',\n",
+ " 'l = [{ i }]',\n",
+ " 'l = [i, v[0], v[1],l]',\n",
+ " 'l = [c[j ],]',\n",
  " 'l = [0]',\n",
- " 'l = [1,2,3,2,7,5,6,8,9]',\n",
- " 'l = [floa_e_b, floa_e_c, f]',\n",
- " 'l = [100, 100, 1]',\n",
- " 'l = [0.5, 1.5]',\n",
- " 'l = [100, 1001, 1001, 1012, 1015]'])"
+ " 'l = [a [ 0 ]]',\n",
+ " 'l = [d, g, i]',\n",
+ " 'l = [g, g, i]',\n",
+ " 'l = [b[j ]]'])"
  ]
  },
- "execution_count": 46,
+ "execution_count": 67,
  "metadata": {},
  "output_type": "execute_result"
  }
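
The alternative 'l = [...]' assignments in this output are sampled from CodeT5 rather than written by hand. The sampling call isn't visible in this hunk, but the traceback at the end of the diff spells it out; reconstructed from those frames (tokenizer, model, and filter_codes are set up in earlier cells not shown here):

def alt_from_code(code):
    # Encode the prompt containing the masked-out variable assignment.
    input_ids = tokenizer(code, return_tensors="pt").input_ids
    # Draw 100 short candidate completions by sampling at temperature 1.0.
    generated_ids = model.generate(
        input_ids,
        num_return_sequences=100,
        max_length=20,
        do_sample=True,
        temperature=1.0,
    )
    return filter_codes(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))
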
@@ -334,25 +336,25 @@
  },
  {
  "cell_type": "code",
- "execution_count": 50,
+ "execution_count": 68,
  "metadata": {},
  "outputs": [
  {
  "data": {
  "text/plain": [
- "(53,\n",
- " [{'start': 'g = 1; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
+ "(29,\n",
+ " [{'start': 'g = 50; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
  " 'code': 'g += l[i]',\n",
- " 'end': 'g = 101; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
- " {'start': 'g = 2; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
+ " 'end': 'g = 150; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
+ " {'start': 'g = 10; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
  " 'code': 'g += l[i]',\n",
- " 'end': 'g = 102; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
- " {'start': 'g = 3; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
+ " 'end': 'g = 110; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
+ " {'start': 'g = -3; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
  " 'code': 'g += l[i]',\n",
- " 'end': 'g = 103; i = 1; l = [100, 100, 0, 0, -100, -100]'}])"
+ " 'end': 'g = 97; i = 1; l = [100, 100, 0, 0, -100, -100]'}])"
  ]
  },
- "execution_count": 50,
+ "execution_count": 68,
  "metadata": {},
  "output_type": "execute_result"
  }
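
Each surviving row pairs a start state with the snippet and the end state reached by running it, which is what trace_code (truncated above) computes. A hedged sketch of the idea, since the full body isn't in this diff:

def trace_code(start_state: str, code: str):
    state = {}
    try:
        exec(start_state, {}, state)  # e.g. 'g = 50; i = 1; l = [100, ...]'
        exec(code, {}, state)         # e.g. 'g += l[i]'
    except Exception:
        return None  # assumption: snippets that crash are simply dropped
    # Render the namespace back into the 'g = 150; i = 1; ...' string form.
    return '; '.join(f'{k} = {v!r}' for k, v in sorted(state.items()))
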
@@ -378,18 +380,81 @@
  },
  {
  "cell_type": "code",
- "execution_count": 4,
+ "execution_count": 69,
  "metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " 0%| | 1/8968897 [00:09<24001:13:52, 9.63s/it]<string>:1: SyntaxWarning: 'int' object is not callable; perhaps you missed a comma?\n",
+ "<string>:1: SyntaxWarning: 'int' object is not callable; perhaps you missed a comma?\n",
+ "<string>:1: SyntaxWarning: 'int' object is not callable; perhaps you missed a comma?\n",
+ " 0%| | 22/8968897 [02:45<14831:12:33, 5.95s/it]<string>:1: SyntaxWarning: 'int' object is not callable; perhaps you missed a comma?\n",
+ "<string>:1: SyntaxWarning: 'int' object is not subscriptable; perhaps you missed a comma?\n",
+ "<string>:1: SyntaxWarning: 'int' object is not subscriptable; perhaps you missed a comma?\n",
+ " 0%| | 34/8968897 [04:26<26565:33:36, 10.66s/it]<string>:1: SyntaxWarning: 'int' object is not subscriptable; perhaps you missed a comma?\n",
+ "<string>:1: SyntaxWarning: 'int' object is not callable; perhaps you missed a comma?\n",
+ " 0%| | 44/8968897 [10:01<34031:25:54, 13.66s/it] \n"
+ ]
+ },
+ {
+ "ename": "KeyboardInterrupt",
+ "evalue": "",
+ "output_type": "error",
+ "traceback": [
+ "---------------------------------------------------------------------------",
+ "KeyboardInterrupt                         Traceback (most recent call last)",
+ "Input In [69], in <module>, line 20: alts = make_alternatives(row)",
+ "Input In [68], in make_alternatives(row), line 6: alt_start_vars, var_alts = get_alts_for_var(list(start_vars), i, row['code'])",
+ "Input In [67], in get_alts_for_var(start_vars, alt_i, code), line 4: var_alts = alt_from_code(code)",
+ "Input In [62], in alt_from_code(code), line 3: generated_ids = model.generate(input_ids, num_return_sequences=100, max_length=20, do_sample=True, temperature=1.0)",
+ "File ~/.pyenv/.../site-packages/torch/autograd/grad_mode.py:28, in _DecoratorContextManager.__call__.<locals>.decorate_context",
+ "File ~/.pyenv/.../site-packages/transformers/generation_utils.py:1200, in GenerationMixin.generate",
+ "File ~/.pyenv/.../site-packages/transformers/generation_utils.py:1710, in GenerationMixin.sample",
+ "File ~/.pyenv/.../site-packages/transformers/models/t5/modeling_t5.py:1616, in T5ForConditionalGeneration.forward",
+ "File ~/.pyenv/.../site-packages/transformers/models/t5/modeling_t5.py:1011, in T5Stack.forward",
+ "File ~/.pyenv/.../site-packages/transformers/models/t5/modeling_t5.py:672, in T5Block.forward",
+ "File ~/.pyenv/.../site-packages/transformers/models/t5/modeling_t5.py:587, in T5LayerCrossAttention.forward",
+ "File ~/.pyenv/.../site-packages/transformers/models/t5/modeling_t5.py:525, in T5Attention.forward",
+ "File ~/.pyenv/.../site-packages/torch/nn/modules/linear.py:103, in Linear.forward",
+ "File ~/.pyenv/.../site-packages/torch/nn/functional.py:1848, in linear",
+ "KeyboardInterrupt: "
+ ]
+ }
+ ],
  "source": [
- "import json\n",
+ "import json, gzip\n",
+ "from tqdm import tqdm\n",
+ "\n",
+ "\n",
+ "with open('data.single_start_alts.jsonl.gz', 'w') as f:\n",
+ "    f.write('')\n",
+ "\n",
+ "\n",
+ "def write_rows_compressed(rows):\n",
+ "    rows = [json.dumps(r) for r in rows]\n",
+ "    with gzip.open('data.alts.jsonl.gz', 'ab') as f:\n",
+ "        f.write('\\n'.join(rows).encode() + b'\\n')\n",
+ "\n",
  "\n",
+ "# currently takes ~10 seconds per iteration for 8,968,897 samples, so ~1k days\n",
  "with open('../data.jsonl', 'r', encoding=\"utf-8\") as f:\n",
- "    for id_, line in enumerate(f):\n",
+ "    new_rows = []\n",
+ "    for id_, line in tqdm(enumerate(f), total=8968897):\n",
  "        row = json.loads(line)\n",
  "        alts = make_alternatives(row)\n",
- "        # TODO: save alts\n",
- "        break"
+ "        new_rows += [row] + alts\n",
+ "        if new_rows and len(new_rows) % 10_000 == 0:\n",
+ "            write_rows_compressed(new_rows)\n",
+ "            new_rows = []\n",
+ "        break\n"
  ]
  },
  {
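
One wrinkle in the new cell: it truncates data.single_start_alts.jsonl.gz with a plain open(..., 'w') but appends batches to data.alts.jsonl.gz, so the reset and the writes target different files, and the reset leaves an uncompressed empty file. Assuming a single gzip output file is intended, a self-contained version of the incremental writer might look like:

import gzip
import json

OUT_PATH = 'data.alts.jsonl.gz'  # assumption: one output file for reset and append

# Reset to an empty gzip archive so reruns don't append to stale data.
with gzip.open(OUT_PATH, 'wb') as f:
    f.write(b'')

def write_rows_compressed(rows):
    """Append a batch of row dicts to the gzipped JSON-lines file."""
    payload = '\n'.join(json.dumps(r) for r in rows).encode() + b'\n'
    with gzip.open(OUT_PATH, 'ab') as f:  # concatenated gzip members stay readable
        f.write(payload)

# Usage, mirroring the notebook's flush-every-10,000-rows loop:
write_rows_compressed([{'start': 'g = 1; i = 0; l = [1]', 'code': 'g += l[i]', 'end': 'g = 2; i = 0; l = [1]'}])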