matt-tries-dl commited on
Commit
4f1cd24
1 Parent(s): b444d89

trained adaptor

Browse files
llama_test.ipynb CHANGED
@@ -39,15 +39,31 @@
39
  "name": "stderr",
40
  "output_type": "stream",
41
  "text": [
 
 
42
  "The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization. \n",
43
  "The tokenizer class you load from this checkpoint is 'LLaMATokenizer'. \n",
44
  "The class this function is called from is 'LlamaTokenizer'.\n"
45
  ]
46
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  {
48
  "data": {
49
  "application/vnd.jupyter.widget-view+json": {
50
- "model_id": "3ab80e2a1c0744e0af747ba63429a2af",
51
  "version_major": 2,
52
  "version_minor": 0
53
  },
@@ -61,6 +77,7 @@
61
  ],
62
  "source": [
63
  "from transformers import LlamaTokenizer, LlamaForCausalLM\n",
 
64
  "tokenizer = LlamaTokenizer.from_pretrained(\n",
65
  " \"decapoda-research/llama-7b-hf\")\n",
66
  " \n",
@@ -68,9 +85,12 @@
68
  "#tokenizer.padding_side = 'left'\n",
69
  "model = LlamaForCausalLM.from_pretrained(\n",
70
  " \"decapoda-research/llama-7b-hf\",\n",
 
71
  " device_map=\"auto\",\n",
72
  " torch_dtype=torch.float16\n",
73
- ")\n"
 
 
74
  ]
75
  },
76
  {
@@ -168,7 +188,7 @@
168
  },
169
  {
170
  "cell_type": "code",
171
- "execution_count": 4,
172
  "metadata": {},
173
  "outputs": [
174
  {
@@ -232,7 +252,7 @@
232
  },
233
  {
234
  "cell_type": "code",
235
- "execution_count": 5,
236
  "metadata": {},
237
  "outputs": [
238
  {
@@ -241,34 +261,35 @@
241
  "text": [
242
  "\n",
243
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
244
- "### Question: What is the Displacement of the Iveco F1CE3481E Engine?\n",
245
- "### Input: Table 2-1415821-6 has columns Model (text),Engine (text),Displacement (text),Valvetrain (text),Fuel system (text),Max. power at rpm (text),Max. torque at rpm (text). \n",
246
- "### Answer: SELECT Displacement FROM 2-1415821-6 WHERE Engine = 'iveco f1ce3481e'\n",
247
  "\n",
248
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
249
- "### Question: What is the record of team utah?\n",
250
- "### Input: Table 2-17355628-9 has columns Game (real),Date (text),Team (text),Score (text),High points (text),High rebounds (text),High assists (text),Location Attendance (text),Record (text). \n",
251
- "### Answer: SELECT Record FROM 2-17355628-9 WHERE Team = 'utah'\n",
252
  "\n",
253
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
254
- "### Question: What is the home of the team with a 16-8 record?\n",
255
- "### Input: Table 2-16188254-4 has columns Date (text),Visitor (text),Score (text),Home (text),Leading scorer (text),Attendance (text),Record (text). \n",
256
- "### Answer: SELECT Home FROM 2-16188254-4 WHERE Record = '16-8'\n",
257
  "\n",
258
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
259
- "### Question: What week did the Galaxy play the Amsterdam Admirals?\n",
260
- "### Input: Table 1-24814477-2 has columns Week (real),Date (text),Kickoff (text),Opponent (text),Final score (text),Team record (text),Game site (text),Attendance (real). \n",
261
- "### Answer: SELECT Week FROM 1-24814477-2 WHERE Opponent = 'Amsterdam Admirals'\n",
262
  "\n",
263
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
264
- "### Question: How many caps did Mitchell Duke have overall?\n",
265
- "### Input: Table 2-1257177-1 has columns Player (text),Country (text),Caps (real),Goals (text),Years Active (text). \n",
266
- "### Answer: SELECT COUNT Caps FROM 2-1257177-1 WHERE Player = 'mitchell duke'\n"
267
  ]
268
  }
269
  ],
270
  "source": [
271
  "import random\n",
 
272
  "\n",
273
  "# defined by WikiSQL\n",
274
  "\n",
@@ -366,13 +387,13 @@
366
  },
367
  {
368
  "cell_type": "code",
369
- "execution_count": 26,
370
  "metadata": {},
371
  "outputs": [
372
  {
373
  "data": {
374
  "application/vnd.jupyter.widget-view+json": {
375
- "model_id": "4f44918087484dd58b958a64cabdecb6",
376
  "version_major": 2,
377
  "version_minor": 0
378
  },
@@ -407,7 +428,7 @@
407
  " target_modules=['q_proj','v_proj']\n",
408
  ")\n",
409
  "\n",
410
- "modad = get_peft_model(model,lora_cfg)\n",
411
  "\n",
412
  "tokenizer.pad_token_id = 0\n",
413
  "\n",
@@ -422,7 +443,7 @@
422
  " padding=\"max_length\"\n",
423
  " ))\n",
424
  "\n",
425
- "#data.remove_columns('prompt')\n",
426
  "\n",
427
  "targs = transformers.TrainingArguments(\n",
428
  " per_device_train_batch_size=MICRO_BATCH,\n",
@@ -438,7 +459,7 @@
438
  ")\n",
439
  "\n",
440
  "\n",
441
- "modad.config.use_cache = False"
442
  ]
443
  },
444
  {
@@ -451,7 +472,7 @@
451
  },
452
  {
453
  "cell_type": "code",
454
- "execution_count": 27,
455
  "metadata": {},
456
  "outputs": [
457
  {
@@ -459,10 +480,10 @@
459
  "output_type": "stream",
460
  "text": [
461
  "Dataset({\n",
462
- " features: ['prompt', 'input_ids', 'attention_mask'],\n",
463
  " num_rows: 56355\n",
464
  "})\n",
465
- "{'prompt': \"Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\\n### Question: Tell me what the notes are for South Australia \\n### Input: Table 1-1000181-1 has columns State/territory (text),Text/background colour (text),Format (text),Current slogan (text),Current series (text),Notes (text). \\n### Answer: SELECT Notes FROM 1-1000181-1 WHERE Current slogan = 'SOUTH AUSTRALIA'\", 'input_ids': [0, 13866, 338, 263, 1139, 393, 16612, 263, 848, 2009, 29892, 3300, 2859, 411, 385, 1881, 393, 16612, 263, 3758, 1591, 29889, 29871, 14350, 263, 3758, 2346, 393, 5663, 17180, 278, 848, 29889, 13, 2277, 29937, 894, 29901, 24948, 592, 825, 278, 11486, 526, 363, 4275, 8314, 29871, 13, 2277, 29937, 10567, 29901, 6137, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 756, 4341, 4306, 29914, 357, 768, 706, 313, 726, 511, 1626, 29914, 7042, 12384, 313, 726, 511, 5809, 313, 726, 511, 7583, 269, 1188, 273, 313, 726, 511, 7583, 3652, 313, 726, 511, 3664, 267, 313, 726, 467, 259, 13, 2277, 29937, 673, 29901, 5097, 29871, 8695, 3895, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 5754, 9626, 269, 1188, 273, 353, 525, 6156, 2692, 29950, 319, 29965, 10810, 1964, 10764, 29915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}\n"
466
  ]
467
  }
468
  ],
@@ -483,46 +504,1802 @@
483
  },
484
  {
485
  "cell_type": "code",
486
- "execution_count": 25,
487
  "metadata": {},
488
  "outputs": [
489
  {
490
- "name": "stderr",
491
- "output_type": "stream",
492
- "text": [
493
- "/home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/optimization.py:395: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n",
494
- " FutureWarning,\n"
495
- ]
496
- },
497
- {
498
- "ename": "ValueError",
499
- "evalue": "Unable to create tensor, you should probably activate truncation and/or padding with 'padding=True' 'truncation=True' to have batched tensors with the same length. Perhaps your features (`prompt` in this case) have excessive nesting (inputs type `list` where type `int` is expected).",
500
- "output_type": "error",
501
- "traceback": [
502
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
503
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
504
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/tokenization_utils_base.py\u001b[0m in \u001b[0;36mconvert_to_tensors\u001b[0;34m(self, tensor_type, prepend_batch_axis)\u001b[0m\n\u001b[1;32m 716\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mis_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 717\u001b[0;31m \u001b[0mtensor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mas_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 718\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
505
- "\u001b[0;31mValueError\u001b[0m: too many dimensions 'str'",
506
- "\nThe above exception was the direct cause of the following exception:\n",
507
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
508
- "\u001b[0;32m/var/tmp/ipykernel_2309/3549391384.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mdata_collator\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtransformers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mDataCollatorForLanguageModeling\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtokenizer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmlm\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m )\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresume_from_checkpoint\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'sqllama-out'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
509
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/trainer.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self, resume_from_checkpoint, trial, ignore_keys_for_eval, **kwargs)\u001b[0m\n\u001b[1;32m 1664\u001b[0m \u001b[0mresume_from_checkpoint\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mresume_from_checkpoint\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1665\u001b[0m \u001b[0mtrial\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrial\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1666\u001b[0;31m \u001b[0mignore_keys_for_eval\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mignore_keys_for_eval\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1667\u001b[0m )\n\u001b[1;32m 1668\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
510
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/trainer.py\u001b[0m in \u001b[0;36m_inner_training_loop\u001b[0;34m(self, batch_size, args, resume_from_checkpoint, trial, ignore_keys_for_eval)\u001b[0m\n\u001b[1;32m 1897\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1898\u001b[0m \u001b[0mstep\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1899\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minputs\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mepoch_iterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1900\u001b[0m \u001b[0mtotal_batched_samples\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1901\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrng_to_sync\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
511
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 626\u001b[0m \u001b[0;31m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 627\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# type: ignore[call-arg]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 628\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_next_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 629\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_num_yielded\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 630\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dataset_kind\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0m_DatasetKind\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mIterable\u001b[0m \u001b[0;32mand\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m\\\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
512
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 669\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_next_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 670\u001b[0m \u001b[0mindex\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_next_index\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# may raise StopIteration\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 671\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dataset_fetcher\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# may raise StopIteration\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 672\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_pin_memory\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 673\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpin_memory\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpin_memory\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_pin_memory_device\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
513
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py\u001b[0m in \u001b[0;36mfetch\u001b[0;34m(self, possibly_batched_index)\u001b[0m\n\u001b[1;32m 59\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 60\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdataset\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mpossibly_batched_index\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 61\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcollate_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
514
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/data/data_collator.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, features, return_tensors)\u001b[0m\n\u001b[1;32m 43\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtf_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeatures\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 44\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0mreturn_tensors\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"pt\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 45\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtorch_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeatures\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 46\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0mreturn_tensors\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"np\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnumpy_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeatures\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
515
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/data/data_collator.py\u001b[0m in \u001b[0;36mtorch_call\u001b[0;34m(self, examples)\u001b[0m\n\u001b[1;32m 727\u001b[0m \u001b[0;31m# Handle dict or lists with proper padding and conversion to tensor.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 728\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexamples\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mMapping\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 729\u001b[0;31m \u001b[0mbatch\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtokenizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpad\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexamples\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mreturn_tensors\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"pt\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpad_to_multiple_of\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpad_to_multiple_of\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 730\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 731\u001b[0m batch = {\n",
516
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/tokenization_utils_base.py\u001b[0m in \u001b[0;36mpad\u001b[0;34m(self, encoded_inputs, padding, max_length, pad_to_multiple_of, return_attention_mask, return_tensors, verbose)\u001b[0m\n\u001b[1;32m 3033\u001b[0m \u001b[0mbatch_outputs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3034\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3035\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mBatchEncoding\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_outputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_type\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mreturn_tensors\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3036\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3037\u001b[0m def create_token_type_ids_from_sequences(\n",
517
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/tokenization_utils_base.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, data, encoding, tensor_type, prepend_batch_axis, n_sequences)\u001b[0m\n\u001b[1;32m 208\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_n_sequences\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mn_sequences\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 209\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 210\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconvert_to_tensors\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtensor_type\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtensor_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprepend_batch_axis\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mprepend_batch_axis\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 211\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 212\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
518
- "\u001b[0;32m~/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/tokenization_utils_base.py\u001b[0m in \u001b[0;36mconvert_to_tensors\u001b[0;34m(self, tensor_type, prepend_batch_axis)\u001b[0m\n\u001b[1;32m 736\u001b[0m \u001b[0;34mf\" features (`{key}` in this case) have excessive nesting (inputs type `list` where type `int` is\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 737\u001b[0m \u001b[0;34m\" expected).\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 738\u001b[0;31m ) from e\n\u001b[0m\u001b[1;32m 739\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 740\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
519
- "\u001b[0;31mValueError\u001b[0m: Unable to create tensor, you should probably activate truncation and/or padding with 'padding=True' 'truncation=True' to have batched tensors with the same length. Perhaps your features (`prompt` in this case) have excessive nesting (inputs type `list` where type `int` is expected)."
520
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
521
  }
522
  ],
523
  "source": [
 
 
524
  "trainer = transformers.Trainer(\n",
525
- " model = modad,\n",
526
  " train_dataset = data,\n",
527
  " args = targs,\n",
528
  " data_collator=transformers.DataCollatorForLanguageModeling(tokenizer, mlm=False)\n",
 
39
  "name": "stderr",
40
  "output_type": "stream",
41
  "text": [
42
+ "/home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/cuda_setup/main.py:136: UserWarning: /opt/conda did not contain libcudart.so as expected! Searching further paths...\n",
43
+ " warn(msg)\n",
44
  "The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization. \n",
45
  "The tokenizer class you load from this checkpoint is 'LLaMATokenizer'. \n",
46
  "The class this function is called from is 'LlamaTokenizer'.\n"
47
  ]
48
  },
49
+ {
50
+ "name": "stdout",
51
+ "output_type": "stream",
52
+ "text": [
53
+ "\n",
54
+ "===================================BUG REPORT===================================\n",
55
+ "Welcome to bitsandbytes. For bug reports, please submit your error trace to: https://github.com/TimDettmers/bitsandbytes/issues\n",
56
+ "================================================================================\n",
57
+ "CUDA SETUP: CUDA runtime path found: /usr/local/cuda/lib64/libcudart.so\n",
58
+ "CUDA SETUP: Highest compute capability among GPUs detected: 7.5\n",
59
+ "CUDA SETUP: Detected CUDA version 113\n",
60
+ "CUDA SETUP: Loading binary /home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/libbitsandbytes_cuda113.so...\n"
61
+ ]
62
+ },
63
  {
64
  "data": {
65
  "application/vnd.jupyter.widget-view+json": {
66
+ "model_id": "5616e727844a4f0b9efaff97aa2f9d75",
67
  "version_major": 2,
68
  "version_minor": 0
69
  },
 
77
  ],
78
  "source": [
79
  "from transformers import LlamaTokenizer, LlamaForCausalLM\n",
80
+ "from peft import prepare_model_for_int8_training\n",
81
  "tokenizer = LlamaTokenizer.from_pretrained(\n",
82
  " \"decapoda-research/llama-7b-hf\")\n",
83
  " \n",
 
85
  "#tokenizer.padding_side = 'left'\n",
86
  "model = LlamaForCausalLM.from_pretrained(\n",
87
  " \"decapoda-research/llama-7b-hf\",\n",
88
+ " load_in_8bit=True,\n",
89
  " device_map=\"auto\",\n",
90
  " torch_dtype=torch.float16\n",
91
+ ")\n",
92
+ "\n",
93
+ "model = prepare_model_for_int8_training(model)\n"
94
  ]
95
  },
96
  {
 
188
  },
189
  {
190
  "cell_type": "code",
191
+ "execution_count": 3,
192
  "metadata": {},
193
  "outputs": [
194
  {
 
252
  },
253
  {
254
  "cell_type": "code",
255
+ "execution_count": 4,
256
  "metadata": {},
257
  "outputs": [
258
  {
 
261
  "text": [
262
  "\n",
263
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
264
+ "### Question: How many games does novica veličković have when there's more than 24 rebounds?\n",
265
+ "### Input: Table 2-16050349-8 has columns Rank (real),Name (text),Team (text),Games (real),Rebounds (real). \n",
266
+ "### Answer: SELECT COUNT Games FROM 2-16050349-8 WHERE Name = 'novica veličković' AND Rebounds > 24\n",
267
  "\n",
268
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
269
+ "### Question: What is the number of capacity at somerset park?\n",
270
+ "### Input: Table 1-11206787-5 has columns Team (text),Stadium (text),Capacity (real),Highest (real),Lowest (real),Average (real). \n",
271
+ "### Answer: SELECT COUNT Capacity FROM 1-11206787-5 WHERE Stadium = 'Somerset Park'\n",
272
  "\n",
273
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
274
+ "### Question: What is the number & name with an Undergoing overhaul, restoration or repairs date?\n",
275
+ "### Input: Table 2-11913905-6 has columns Number & Name (text),Description (text),Livery (text),Owner(s) (text),Date (text). \n",
276
+ "### Answer: SELECT Number & Name FROM 2-11913905-6 WHERE Date = 'undergoing overhaul, restoration or repairs'\n",
277
  "\n",
278
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
279
+ "### Question: What year did Orlando have a School/Club team in Clemson?\n",
280
+ "### Input: Table 2-15621965-7 has columns Player (text),Nationality (text),Position (text),Years in Orlando (text),School/Club Team (text). \n",
281
+ "### Answer: SELECT Years in Orlando FROM 2-15621965-7 WHERE School/Club Team = 'clemson'\n",
282
  "\n",
283
  "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n",
284
+ "### Question: How many Deaths have a Fate of damaged, and a Tonnage (GRT) smaller than 4,917?\n",
285
+ "### Input: Table 2-18914307-1 has columns Date (text),Ship Name (text),Flag (text),Tonnage ( GRT ) (real),Fate (text),Deaths (real). \n",
286
+ "### Answer: SELECT COUNT Deaths FROM 2-18914307-1 WHERE Fate = 'damaged' AND Tonnage ( GRT ) < 4,917\n"
287
  ]
288
  }
289
  ],
290
  "source": [
291
  "import random\n",
292
+ "import json\n",
293
  "\n",
294
  "# defined by WikiSQL\n",
295
  "\n",
 
387
  },
388
  {
389
  "cell_type": "code",
390
+ "execution_count": 5,
391
  "metadata": {},
392
  "outputs": [
393
  {
394
  "data": {
395
  "application/vnd.jupyter.widget-view+json": {
396
+ "model_id": "431b36f60a3940cf8646e1bea4324745",
397
  "version_major": 2,
398
  "version_minor": 0
399
  },
 
428
  " target_modules=['q_proj','v_proj']\n",
429
  ")\n",
430
  "\n",
431
+ "model = get_peft_model(model,lora_cfg)\n",
432
  "\n",
433
  "tokenizer.pad_token_id = 0\n",
434
  "\n",
 
443
  " padding=\"max_length\"\n",
444
  " ))\n",
445
  "\n",
446
+ "data = data.remove_columns('prompt')\n",
447
  "\n",
448
  "targs = transformers.TrainingArguments(\n",
449
  " per_device_train_batch_size=MICRO_BATCH,\n",
 
459
  ")\n",
460
  "\n",
461
  "\n",
462
+ "model.config.use_cache = False"
463
  ]
464
  },
465
  {
 
472
  },
473
  {
474
  "cell_type": "code",
475
+ "execution_count": 6,
476
  "metadata": {},
477
  "outputs": [
478
  {
 
480
  "output_type": "stream",
481
  "text": [
482
  "Dataset({\n",
483
+ " features: ['input_ids', 'attention_mask'],\n",
484
  " num_rows: 56355\n",
485
  "})\n",
486
+ "{'input_ids': [0, 13866, 338, 263, 1139, 393, 16612, 263, 848, 2009, 29892, 3300, 2859, 411, 385, 1881, 393, 16612, 263, 3758, 1591, 29889, 29871, 14350, 263, 3758, 2346, 393, 5663, 17180, 278, 848, 29889, 13, 2277, 29937, 894, 29901, 24948, 592, 825, 278, 11486, 526, 363, 4275, 8314, 29871, 13, 2277, 29937, 10567, 29901, 6137, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 756, 4341, 4306, 29914, 357, 768, 706, 313, 726, 511, 1626, 29914, 7042, 12384, 313, 726, 511, 5809, 313, 726, 511, 7583, 269, 1188, 273, 313, 726, 511, 7583, 3652, 313, 726, 511, 3664, 267, 313, 726, 467, 259, 13, 2277, 29937, 673, 29901, 5097, 29871, 8695, 3895, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 5754, 9626, 269, 1188, 273, 353, 525, 6156, 2692, 29950, 319, 29965, 10810, 1964, 10764, 29915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}\n"
487
  ]
488
  }
489
  ],
 
504
  },
505
  {
506
  "cell_type": "code",
507
+ "execution_count": 6,
508
  "metadata": {},
509
  "outputs": [
510
  {
511
+ "data": {
512
+ "text/html": [
513
+ "\n",
514
+ " <div>\n",
515
+ " \n",
516
+ " <progress value='440' max='440' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
517
+ " [440/440 11:19:07, Epoch 0/1]\n",
518
+ " </div>\n",
519
+ " <table border=\"1\" class=\"dataframe\">\n",
520
+ " <thead>\n",
521
+ " <tr style=\"text-align: left;\">\n",
522
+ " <th>Step</th>\n",
523
+ " <th>Training Loss</th>\n",
524
+ " </tr>\n",
525
+ " </thead>\n",
526
+ " <tbody>\n",
527
+ " <tr>\n",
528
+ " <td>1</td>\n",
529
+ " <td>2.517200</td>\n",
530
+ " </tr>\n",
531
+ " <tr>\n",
532
+ " <td>2</td>\n",
533
+ " <td>2.482300</td>\n",
534
+ " </tr>\n",
535
+ " <tr>\n",
536
+ " <td>3</td>\n",
537
+ " <td>2.444100</td>\n",
538
+ " </tr>\n",
539
+ " <tr>\n",
540
+ " <td>4</td>\n",
541
+ " <td>2.456500</td>\n",
542
+ " </tr>\n",
543
+ " <tr>\n",
544
+ " <td>5</td>\n",
545
+ " <td>2.441400</td>\n",
546
+ " </tr>\n",
547
+ " <tr>\n",
548
+ " <td>6</td>\n",
549
+ " <td>2.484600</td>\n",
550
+ " </tr>\n",
551
+ " <tr>\n",
552
+ " <td>7</td>\n",
553
+ " <td>2.424000</td>\n",
554
+ " </tr>\n",
555
+ " <tr>\n",
556
+ " <td>8</td>\n",
557
+ " <td>2.477900</td>\n",
558
+ " </tr>\n",
559
+ " <tr>\n",
560
+ " <td>9</td>\n",
561
+ " <td>2.429700</td>\n",
562
+ " </tr>\n",
563
+ " <tr>\n",
564
+ " <td>10</td>\n",
565
+ " <td>2.436000</td>\n",
566
+ " </tr>\n",
567
+ " <tr>\n",
568
+ " <td>11</td>\n",
569
+ " <td>2.422000</td>\n",
570
+ " </tr>\n",
571
+ " <tr>\n",
572
+ " <td>12</td>\n",
573
+ " <td>2.408800</td>\n",
574
+ " </tr>\n",
575
+ " <tr>\n",
576
+ " <td>13</td>\n",
577
+ " <td>2.402900</td>\n",
578
+ " </tr>\n",
579
+ " <tr>\n",
580
+ " <td>14</td>\n",
581
+ " <td>2.424500</td>\n",
582
+ " </tr>\n",
583
+ " <tr>\n",
584
+ " <td>15</td>\n",
585
+ " <td>2.421800</td>\n",
586
+ " </tr>\n",
587
+ " <tr>\n",
588
+ " <td>16</td>\n",
589
+ " <td>2.424100</td>\n",
590
+ " </tr>\n",
591
+ " <tr>\n",
592
+ " <td>17</td>\n",
593
+ " <td>2.404000</td>\n",
594
+ " </tr>\n",
595
+ " <tr>\n",
596
+ " <td>18</td>\n",
597
+ " <td>2.386900</td>\n",
598
+ " </tr>\n",
599
+ " <tr>\n",
600
+ " <td>19</td>\n",
601
+ " <td>2.414400</td>\n",
602
+ " </tr>\n",
603
+ " <tr>\n",
604
+ " <td>20</td>\n",
605
+ " <td>2.370600</td>\n",
606
+ " </tr>\n",
607
+ " <tr>\n",
608
+ " <td>21</td>\n",
609
+ " <td>2.382500</td>\n",
610
+ " </tr>\n",
611
+ " <tr>\n",
612
+ " <td>22</td>\n",
613
+ " <td>2.350700</td>\n",
614
+ " </tr>\n",
615
+ " <tr>\n",
616
+ " <td>23</td>\n",
617
+ " <td>2.385700</td>\n",
618
+ " </tr>\n",
619
+ " <tr>\n",
620
+ " <td>24</td>\n",
621
+ " <td>2.350400</td>\n",
622
+ " </tr>\n",
623
+ " <tr>\n",
624
+ " <td>25</td>\n",
625
+ " <td>2.354900</td>\n",
626
+ " </tr>\n",
627
+ " <tr>\n",
628
+ " <td>26</td>\n",
629
+ " <td>2.345400</td>\n",
630
+ " </tr>\n",
631
+ " <tr>\n",
632
+ " <td>27</td>\n",
633
+ " <td>2.373000</td>\n",
634
+ " </tr>\n",
635
+ " <tr>\n",
636
+ " <td>28</td>\n",
637
+ " <td>2.343200</td>\n",
638
+ " </tr>\n",
639
+ " <tr>\n",
640
+ " <td>29</td>\n",
641
+ " <td>2.374300</td>\n",
642
+ " </tr>\n",
643
+ " <tr>\n",
644
+ " <td>30</td>\n",
645
+ " <td>2.325000</td>\n",
646
+ " </tr>\n",
647
+ " <tr>\n",
648
+ " <td>31</td>\n",
649
+ " <td>2.352000</td>\n",
650
+ " </tr>\n",
651
+ " <tr>\n",
652
+ " <td>32</td>\n",
653
+ " <td>2.344600</td>\n",
654
+ " </tr>\n",
655
+ " <tr>\n",
656
+ " <td>33</td>\n",
657
+ " <td>2.360000</td>\n",
658
+ " </tr>\n",
659
+ " <tr>\n",
660
+ " <td>34</td>\n",
661
+ " <td>2.347400</td>\n",
662
+ " </tr>\n",
663
+ " <tr>\n",
664
+ " <td>35</td>\n",
665
+ " <td>2.346700</td>\n",
666
+ " </tr>\n",
667
+ " <tr>\n",
668
+ " <td>36</td>\n",
669
+ " <td>2.329000</td>\n",
670
+ " </tr>\n",
671
+ " <tr>\n",
672
+ " <td>37</td>\n",
673
+ " <td>2.314600</td>\n",
674
+ " </tr>\n",
675
+ " <tr>\n",
676
+ " <td>38</td>\n",
677
+ " <td>2.306000</td>\n",
678
+ " </tr>\n",
679
+ " <tr>\n",
680
+ " <td>39</td>\n",
681
+ " <td>2.292600</td>\n",
682
+ " </tr>\n",
683
+ " <tr>\n",
684
+ " <td>40</td>\n",
685
+ " <td>2.333800</td>\n",
686
+ " </tr>\n",
687
+ " <tr>\n",
688
+ " <td>41</td>\n",
689
+ " <td>2.311500</td>\n",
690
+ " </tr>\n",
691
+ " <tr>\n",
692
+ " <td>42</td>\n",
693
+ " <td>2.308300</td>\n",
694
+ " </tr>\n",
695
+ " <tr>\n",
696
+ " <td>43</td>\n",
697
+ " <td>2.287400</td>\n",
698
+ " </tr>\n",
699
+ " <tr>\n",
700
+ " <td>44</td>\n",
701
+ " <td>2.314100</td>\n",
702
+ " </tr>\n",
703
+ " <tr>\n",
704
+ " <td>45</td>\n",
705
+ " <td>2.280400</td>\n",
706
+ " </tr>\n",
707
+ " <tr>\n",
708
+ " <td>46</td>\n",
709
+ " <td>2.261300</td>\n",
710
+ " </tr>\n",
711
+ " <tr>\n",
712
+ " <td>47</td>\n",
713
+ " <td>2.274200</td>\n",
714
+ " </tr>\n",
715
+ " <tr>\n",
716
+ " <td>48</td>\n",
717
+ " <td>2.246900</td>\n",
718
+ " </tr>\n",
719
+ " <tr>\n",
720
+ " <td>49</td>\n",
721
+ " <td>2.257100</td>\n",
722
+ " </tr>\n",
723
+ " <tr>\n",
724
+ " <td>50</td>\n",
725
+ " <td>2.274500</td>\n",
726
+ " </tr>\n",
727
+ " <tr>\n",
728
+ " <td>51</td>\n",
729
+ " <td>2.245500</td>\n",
730
+ " </tr>\n",
731
+ " <tr>\n",
732
+ " <td>52</td>\n",
733
+ " <td>2.250700</td>\n",
734
+ " </tr>\n",
735
+ " <tr>\n",
736
+ " <td>53</td>\n",
737
+ " <td>2.296600</td>\n",
738
+ " </tr>\n",
739
+ " <tr>\n",
740
+ " <td>54</td>\n",
741
+ " <td>2.261000</td>\n",
742
+ " </tr>\n",
743
+ " <tr>\n",
744
+ " <td>55</td>\n",
745
+ " <td>2.223800</td>\n",
746
+ " </tr>\n",
747
+ " <tr>\n",
748
+ " <td>56</td>\n",
749
+ " <td>2.244000</td>\n",
750
+ " </tr>\n",
751
+ " <tr>\n",
752
+ " <td>57</td>\n",
753
+ " <td>2.228500</td>\n",
754
+ " </tr>\n",
755
+ " <tr>\n",
756
+ " <td>58</td>\n",
757
+ " <td>2.229100</td>\n",
758
+ " </tr>\n",
759
+ " <tr>\n",
760
+ " <td>59</td>\n",
761
+ " <td>2.162300</td>\n",
762
+ " </tr>\n",
763
+ " <tr>\n",
764
+ " <td>60</td>\n",
765
+ " <td>2.238000</td>\n",
766
+ " </tr>\n",
767
+ " <tr>\n",
768
+ " <td>61</td>\n",
769
+ " <td>2.246000</td>\n",
770
+ " </tr>\n",
771
+ " <tr>\n",
772
+ " <td>62</td>\n",
773
+ " <td>2.184800</td>\n",
774
+ " </tr>\n",
775
+ " <tr>\n",
776
+ " <td>63</td>\n",
777
+ " <td>2.195000</td>\n",
778
+ " </tr>\n",
779
+ " <tr>\n",
780
+ " <td>64</td>\n",
781
+ " <td>2.199500</td>\n",
782
+ " </tr>\n",
783
+ " <tr>\n",
784
+ " <td>65</td>\n",
785
+ " <td>2.180000</td>\n",
786
+ " </tr>\n",
787
+ " <tr>\n",
788
+ " <td>66</td>\n",
789
+ " <td>2.179800</td>\n",
790
+ " </tr>\n",
791
+ " <tr>\n",
792
+ " <td>67</td>\n",
793
+ " <td>2.149700</td>\n",
794
+ " </tr>\n",
795
+ " <tr>\n",
796
+ " <td>68</td>\n",
797
+ " <td>2.177000</td>\n",
798
+ " </tr>\n",
799
+ " <tr>\n",
800
+ " <td>69</td>\n",
801
+ " <td>2.156600</td>\n",
802
+ " </tr>\n",
803
+ " <tr>\n",
804
+ " <td>70</td>\n",
805
+ " <td>2.193400</td>\n",
806
+ " </tr>\n",
807
+ " <tr>\n",
808
+ " <td>71</td>\n",
809
+ " <td>2.163400</td>\n",
810
+ " </tr>\n",
811
+ " <tr>\n",
812
+ " <td>72</td>\n",
813
+ " <td>2.147400</td>\n",
814
+ " </tr>\n",
815
+ " <tr>\n",
816
+ " <td>73</td>\n",
817
+ " <td>2.134700</td>\n",
818
+ " </tr>\n",
819
+ " <tr>\n",
820
+ " <td>74</td>\n",
821
+ " <td>2.133200</td>\n",
822
+ " </tr>\n",
823
+ " <tr>\n",
824
+ " <td>75</td>\n",
825
+ " <td>2.118000</td>\n",
826
+ " </tr>\n",
827
+ " <tr>\n",
828
+ " <td>76</td>\n",
829
+ " <td>2.139000</td>\n",
830
+ " </tr>\n",
831
+ " <tr>\n",
832
+ " <td>77</td>\n",
833
+ " <td>2.102000</td>\n",
834
+ " </tr>\n",
835
+ " <tr>\n",
836
+ " <td>78</td>\n",
837
+ " <td>2.109100</td>\n",
838
+ " </tr>\n",
839
+ " <tr>\n",
840
+ " <td>79</td>\n",
841
+ " <td>2.099000</td>\n",
842
+ " </tr>\n",
843
+ " <tr>\n",
844
+ " <td>80</td>\n",
845
+ " <td>2.097500</td>\n",
846
+ " </tr>\n",
847
+ " <tr>\n",
848
+ " <td>81</td>\n",
849
+ " <td>2.073200</td>\n",
850
+ " </tr>\n",
851
+ " <tr>\n",
852
+ " <td>82</td>\n",
853
+ " <td>2.055200</td>\n",
854
+ " </tr>\n",
855
+ " <tr>\n",
856
+ " <td>83</td>\n",
857
+ " <td>2.078100</td>\n",
858
+ " </tr>\n",
859
+ " <tr>\n",
860
+ " <td>84</td>\n",
861
+ " <td>2.104800</td>\n",
862
+ " </tr>\n",
863
+ " <tr>\n",
864
+ " <td>85</td>\n",
865
+ " <td>2.061100</td>\n",
866
+ " </tr>\n",
867
+ " <tr>\n",
868
+ " <td>86</td>\n",
869
+ " <td>2.066500</td>\n",
870
+ " </tr>\n",
871
+ " <tr>\n",
872
+ " <td>87</td>\n",
873
+ " <td>2.073500</td>\n",
874
+ " </tr>\n",
875
+ " <tr>\n",
876
+ " <td>88</td>\n",
877
+ " <td>2.010500</td>\n",
878
+ " </tr>\n",
879
+ " <tr>\n",
880
+ " <td>89</td>\n",
881
+ " <td>2.045700</td>\n",
882
+ " </tr>\n",
883
+ " <tr>\n",
884
+ " <td>90</td>\n",
885
+ " <td>2.026700</td>\n",
886
+ " </tr>\n",
887
+ " <tr>\n",
888
+ " <td>91</td>\n",
889
+ " <td>2.046500</td>\n",
890
+ " </tr>\n",
891
+ " <tr>\n",
892
+ " <td>92</td>\n",
893
+ " <td>2.015300</td>\n",
894
+ " </tr>\n",
895
+ " <tr>\n",
896
+ " <td>93</td>\n",
897
+ " <td>2.019100</td>\n",
898
+ " </tr>\n",
899
+ " <tr>\n",
900
+ " <td>94</td>\n",
901
+ " <td>2.008600</td>\n",
902
+ " </tr>\n",
903
+ " <tr>\n",
904
+ " <td>95</td>\n",
905
+ " <td>1.961000</td>\n",
906
+ " </tr>\n",
907
+ " <tr>\n",
908
+ " <td>96</td>\n",
909
+ " <td>1.974300</td>\n",
910
+ " </tr>\n",
911
+ " <tr>\n",
912
+ " <td>97</td>\n",
913
+ " <td>1.991700</td>\n",
914
+ " </tr>\n",
915
+ " <tr>\n",
916
+ " <td>98</td>\n",
917
+ " <td>1.984700</td>\n",
918
+ " </tr>\n",
919
+ " <tr>\n",
920
+ " <td>99</td>\n",
921
+ " <td>1.975900</td>\n",
922
+ " </tr>\n",
923
+ " <tr>\n",
924
+ " <td>100</td>\n",
925
+ " <td>1.963900</td>\n",
926
+ " </tr>\n",
927
+ " <tr>\n",
928
+ " <td>101</td>\n",
929
+ " <td>1.934300</td>\n",
930
+ " </tr>\n",
931
+ " <tr>\n",
932
+ " <td>102</td>\n",
933
+ " <td>1.990400</td>\n",
934
+ " </tr>\n",
935
+ " <tr>\n",
936
+ " <td>103</td>\n",
937
+ " <td>1.914900</td>\n",
938
+ " </tr>\n",
939
+ " <tr>\n",
940
+ " <td>104</td>\n",
941
+ " <td>1.956100</td>\n",
942
+ " </tr>\n",
943
+ " <tr>\n",
944
+ " <td>105</td>\n",
945
+ " <td>1.943400</td>\n",
946
+ " </tr>\n",
947
+ " <tr>\n",
948
+ " <td>106</td>\n",
949
+ " <td>1.931000</td>\n",
950
+ " </tr>\n",
951
+ " <tr>\n",
952
+ " <td>107</td>\n",
953
+ " <td>1.919000</td>\n",
954
+ " </tr>\n",
955
+ " <tr>\n",
956
+ " <td>108</td>\n",
957
+ " <td>1.912800</td>\n",
958
+ " </tr>\n",
959
+ " <tr>\n",
960
+ " <td>109</td>\n",
961
+ " <td>1.920400</td>\n",
962
+ " </tr>\n",
963
+ " <tr>\n",
964
+ " <td>110</td>\n",
965
+ " <td>1.878300</td>\n",
966
+ " </tr>\n",
967
+ " <tr>\n",
968
+ " <td>111</td>\n",
969
+ " <td>1.890800</td>\n",
970
+ " </tr>\n",
971
+ " <tr>\n",
972
+ " <td>112</td>\n",
973
+ " <td>1.881900</td>\n",
974
+ " </tr>\n",
975
+ " <tr>\n",
976
+ " <td>113</td>\n",
977
+ " <td>1.885400</td>\n",
978
+ " </tr>\n",
979
+ " <tr>\n",
980
+ " <td>114</td>\n",
981
+ " <td>1.908400</td>\n",
982
+ " </tr>\n",
983
+ " <tr>\n",
984
+ " <td>115</td>\n",
985
+ " <td>1.871200</td>\n",
986
+ " </tr>\n",
987
+ " <tr>\n",
988
+ " <td>116</td>\n",
989
+ " <td>1.900000</td>\n",
990
+ " </tr>\n",
991
+ " <tr>\n",
992
+ " <td>117</td>\n",
993
+ " <td>1.888000</td>\n",
994
+ " </tr>\n",
995
+ " <tr>\n",
996
+ " <td>118</td>\n",
997
+ " <td>1.875100</td>\n",
998
+ " </tr>\n",
999
+ " <tr>\n",
1000
+ " <td>119</td>\n",
1001
+ " <td>1.855000</td>\n",
1002
+ " </tr>\n",
1003
+ " <tr>\n",
1004
+ " <td>120</td>\n",
1005
+ " <td>1.852100</td>\n",
1006
+ " </tr>\n",
1007
+ " <tr>\n",
1008
+ " <td>121</td>\n",
1009
+ " <td>1.851200</td>\n",
1010
+ " </tr>\n",
1011
+ " <tr>\n",
1012
+ " <td>122</td>\n",
1013
+ " <td>1.821800</td>\n",
1014
+ " </tr>\n",
1015
+ " <tr>\n",
1016
+ " <td>123</td>\n",
1017
+ " <td>1.853000</td>\n",
1018
+ " </tr>\n",
1019
+ " <tr>\n",
1020
+ " <td>124</td>\n",
1021
+ " <td>1.854700</td>\n",
1022
+ " </tr>\n",
1023
+ " <tr>\n",
1024
+ " <td>125</td>\n",
1025
+ " <td>1.806900</td>\n",
1026
+ " </tr>\n",
1027
+ " <tr>\n",
1028
+ " <td>126</td>\n",
1029
+ " <td>1.845300</td>\n",
1030
+ " </tr>\n",
1031
+ " <tr>\n",
1032
+ " <td>127</td>\n",
1033
+ " <td>1.797800</td>\n",
1034
+ " </tr>\n",
1035
+ " <tr>\n",
1036
+ " <td>128</td>\n",
1037
+ " <td>1.795300</td>\n",
1038
+ " </tr>\n",
1039
+ " <tr>\n",
1040
+ " <td>129</td>\n",
1041
+ " <td>1.799500</td>\n",
1042
+ " </tr>\n",
1043
+ " <tr>\n",
1044
+ " <td>130</td>\n",
1045
+ " <td>1.853900</td>\n",
1046
+ " </tr>\n",
1047
+ " <tr>\n",
1048
+ " <td>131</td>\n",
1049
+ " <td>1.780100</td>\n",
1050
+ " </tr>\n",
1051
+ " <tr>\n",
1052
+ " <td>132</td>\n",
1053
+ " <td>1.789400</td>\n",
1054
+ " </tr>\n",
1055
+ " <tr>\n",
1056
+ " <td>133</td>\n",
1057
+ " <td>1.776700</td>\n",
1058
+ " </tr>\n",
1059
+ " <tr>\n",
1060
+ " <td>134</td>\n",
1061
+ " <td>1.747300</td>\n",
1062
+ " </tr>\n",
1063
+ " <tr>\n",
1064
+ " <td>135</td>\n",
1065
+ " <td>1.753700</td>\n",
1066
+ " </tr>\n",
1067
+ " <tr>\n",
1068
+ " <td>136</td>\n",
1069
+ " <td>1.761300</td>\n",
1070
+ " </tr>\n",
1071
+ " <tr>\n",
1072
+ " <td>137</td>\n",
1073
+ " <td>1.725500</td>\n",
1074
+ " </tr>\n",
1075
+ " <tr>\n",
1076
+ " <td>138</td>\n",
1077
+ " <td>1.710800</td>\n",
1078
+ " </tr>\n",
1079
+ " <tr>\n",
1080
+ " <td>139</td>\n",
1081
+ " <td>1.733500</td>\n",
1082
+ " </tr>\n",
1083
+ " <tr>\n",
1084
+ " <td>140</td>\n",
1085
+ " <td>1.727000</td>\n",
1086
+ " </tr>\n",
1087
+ " <tr>\n",
1088
+ " <td>141</td>\n",
1089
+ " <td>1.744300</td>\n",
1090
+ " </tr>\n",
1091
+ " <tr>\n",
1092
+ " <td>142</td>\n",
1093
+ " <td>1.728900</td>\n",
1094
+ " </tr>\n",
1095
+ " <tr>\n",
1096
+ " <td>143</td>\n",
1097
+ " <td>1.725100</td>\n",
1098
+ " </tr>\n",
1099
+ " <tr>\n",
1100
+ " <td>144</td>\n",
1101
+ " <td>1.708000</td>\n",
1102
+ " </tr>\n",
1103
+ " <tr>\n",
1104
+ " <td>145</td>\n",
1105
+ " <td>1.709000</td>\n",
1106
+ " </tr>\n",
1107
+ " <tr>\n",
1108
+ " <td>146</td>\n",
1109
+ " <td>1.704600</td>\n",
1110
+ " </tr>\n",
1111
+ " <tr>\n",
1112
+ " <td>147</td>\n",
1113
+ " <td>1.684600</td>\n",
1114
+ " </tr>\n",
1115
+ " <tr>\n",
1116
+ " <td>148</td>\n",
1117
+ " <td>1.676100</td>\n",
1118
+ " </tr>\n",
1119
+ " <tr>\n",
1120
+ " <td>149</td>\n",
1121
+ " <td>1.682800</td>\n",
1122
+ " </tr>\n",
1123
+ " <tr>\n",
1124
+ " <td>150</td>\n",
1125
+ " <td>1.669900</td>\n",
1126
+ " </tr>\n",
1127
+ " <tr>\n",
1128
+ " <td>151</td>\n",
1129
+ " <td>1.636400</td>\n",
1130
+ " </tr>\n",
1131
+ " <tr>\n",
1132
+ " <td>152</td>\n",
1133
+ " <td>1.671500</td>\n",
1134
+ " </tr>\n",
1135
+ " <tr>\n",
1136
+ " <td>153</td>\n",
1137
+ " <td>1.673200</td>\n",
1138
+ " </tr>\n",
1139
+ " <tr>\n",
1140
+ " <td>154</td>\n",
1141
+ " <td>1.644300</td>\n",
1142
+ " </tr>\n",
1143
+ " <tr>\n",
1144
+ " <td>155</td>\n",
1145
+ " <td>1.620800</td>\n",
1146
+ " </tr>\n",
1147
+ " <tr>\n",
1148
+ " <td>156</td>\n",
1149
+ " <td>1.617500</td>\n",
1150
+ " </tr>\n",
1151
+ " <tr>\n",
1152
+ " <td>157</td>\n",
1153
+ " <td>1.647700</td>\n",
1154
+ " </tr>\n",
1155
+ " <tr>\n",
1156
+ " <td>158</td>\n",
1157
+ " <td>1.629300</td>\n",
1158
+ " </tr>\n",
1159
+ " <tr>\n",
1160
+ " <td>159</td>\n",
1161
+ " <td>1.608800</td>\n",
1162
+ " </tr>\n",
1163
+ " <tr>\n",
1164
+ " <td>160</td>\n",
1165
+ " <td>1.633000</td>\n",
1166
+ " </tr>\n",
1167
+ " <tr>\n",
1168
+ " <td>161</td>\n",
1169
+ " <td>1.618200</td>\n",
1170
+ " </tr>\n",
1171
+ " <tr>\n",
1172
+ " <td>162</td>\n",
1173
+ " <td>1.634300</td>\n",
1174
+ " </tr>\n",
1175
+ " <tr>\n",
1176
+ " <td>163</td>\n",
1177
+ " <td>1.588400</td>\n",
1178
+ " </tr>\n",
1179
+ " <tr>\n",
1180
+ " <td>164</td>\n",
1181
+ " <td>1.581100</td>\n",
1182
+ " </tr>\n",
1183
+ " <tr>\n",
1184
+ " <td>165</td>\n",
1185
+ " <td>1.584500</td>\n",
1186
+ " </tr>\n",
1187
+ " <tr>\n",
1188
+ " <td>166</td>\n",
1189
+ " <td>1.594800</td>\n",
1190
+ " </tr>\n",
1191
+ " <tr>\n",
1192
+ " <td>167</td>\n",
1193
+ " <td>1.563800</td>\n",
1194
+ " </tr>\n",
1195
+ " <tr>\n",
1196
+ " <td>168</td>\n",
1197
+ " <td>1.576900</td>\n",
1198
+ " </tr>\n",
1199
+ " <tr>\n",
1200
+ " <td>169</td>\n",
1201
+ " <td>1.546300</td>\n",
1202
+ " </tr>\n",
1203
+ " <tr>\n",
1204
+ " <td>170</td>\n",
1205
+ " <td>1.569800</td>\n",
1206
+ " </tr>\n",
1207
+ " <tr>\n",
1208
+ " <td>171</td>\n",
1209
+ " <td>1.592300</td>\n",
1210
+ " </tr>\n",
1211
+ " <tr>\n",
1212
+ " <td>172</td>\n",
1213
+ " <td>1.537800</td>\n",
1214
+ " </tr>\n",
1215
+ " <tr>\n",
1216
+ " <td>173</td>\n",
1217
+ " <td>1.519200</td>\n",
1218
+ " </tr>\n",
1219
+ " <tr>\n",
1220
+ " <td>174</td>\n",
1221
+ " <td>1.512100</td>\n",
1222
+ " </tr>\n",
1223
+ " <tr>\n",
1224
+ " <td>175</td>\n",
1225
+ " <td>1.581500</td>\n",
1226
+ " </tr>\n",
1227
+ " <tr>\n",
1228
+ " <td>176</td>\n",
1229
+ " <td>1.534500</td>\n",
1230
+ " </tr>\n",
1231
+ " <tr>\n",
1232
+ " <td>177</td>\n",
1233
+ " <td>1.509400</td>\n",
1234
+ " </tr>\n",
1235
+ " <tr>\n",
1236
+ " <td>178</td>\n",
1237
+ " <td>1.521300</td>\n",
1238
+ " </tr>\n",
1239
+ " <tr>\n",
1240
+ " <td>179</td>\n",
1241
+ " <td>1.528500</td>\n",
1242
+ " </tr>\n",
1243
+ " <tr>\n",
1244
+ " <td>180</td>\n",
1245
+ " <td>1.494300</td>\n",
1246
+ " </tr>\n",
1247
+ " <tr>\n",
1248
+ " <td>181</td>\n",
1249
+ " <td>1.495000</td>\n",
1250
+ " </tr>\n",
1251
+ " <tr>\n",
1252
+ " <td>182</td>\n",
1253
+ " <td>1.499700</td>\n",
1254
+ " </tr>\n",
1255
+ " <tr>\n",
1256
+ " <td>183</td>\n",
1257
+ " <td>1.461300</td>\n",
1258
+ " </tr>\n",
1259
+ " <tr>\n",
1260
+ " <td>184</td>\n",
1261
+ " <td>1.469200</td>\n",
1262
+ " </tr>\n",
1263
+ " <tr>\n",
1264
+ " <td>185</td>\n",
1265
+ " <td>1.495200</td>\n",
1266
+ " </tr>\n",
1267
+ " <tr>\n",
1268
+ " <td>186</td>\n",
1269
+ " <td>1.467400</td>\n",
1270
+ " </tr>\n",
1271
+ " <tr>\n",
1272
+ " <td>187</td>\n",
1273
+ " <td>1.437000</td>\n",
1274
+ " </tr>\n",
1275
+ " <tr>\n",
1276
+ " <td>188</td>\n",
1277
+ " <td>1.463000</td>\n",
1278
+ " </tr>\n",
1279
+ " <tr>\n",
1280
+ " <td>189</td>\n",
1281
+ " <td>1.437900</td>\n",
1282
+ " </tr>\n",
1283
+ " <tr>\n",
1284
+ " <td>190</td>\n",
1285
+ " <td>1.467400</td>\n",
1286
+ " </tr>\n",
1287
+ " <tr>\n",
1288
+ " <td>191</td>\n",
1289
+ " <td>1.472300</td>\n",
1290
+ " </tr>\n",
1291
+ " <tr>\n",
1292
+ " <td>192</td>\n",
1293
+ " <td>1.434000</td>\n",
1294
+ " </tr>\n",
1295
+ " <tr>\n",
1296
+ " <td>193</td>\n",
1297
+ " <td>1.411500</td>\n",
1298
+ " </tr>\n",
1299
+ " <tr>\n",
1300
+ " <td>194</td>\n",
1301
+ " <td>1.432500</td>\n",
1302
+ " </tr>\n",
1303
+ " <tr>\n",
1304
+ " <td>195</td>\n",
1305
+ " <td>1.459800</td>\n",
1306
+ " </tr>\n",
1307
+ " <tr>\n",
1308
+ " <td>196</td>\n",
1309
+ " <td>1.431900</td>\n",
1310
+ " </tr>\n",
1311
+ " <tr>\n",
1312
+ " <td>197</td>\n",
1313
+ " <td>1.456200</td>\n",
1314
+ " </tr>\n",
1315
+ " <tr>\n",
1316
+ " <td>198</td>\n",
1317
+ " <td>1.394800</td>\n",
1318
+ " </tr>\n",
1319
+ " <tr>\n",
1320
+ " <td>199</td>\n",
1321
+ " <td>1.422700</td>\n",
1322
+ " </tr>\n",
1323
+ " <tr>\n",
1324
+ " <td>200</td>\n",
1325
+ " <td>1.412800</td>\n",
1326
+ " </tr>\n",
1327
+ " <tr>\n",
1328
+ " <td>201</td>\n",
1329
+ " <td>1.413800</td>\n",
1330
+ " </tr>\n",
1331
+ " <tr>\n",
1332
+ " <td>202</td>\n",
1333
+ " <td>1.380000</td>\n",
1334
+ " </tr>\n",
1335
+ " <tr>\n",
1336
+ " <td>203</td>\n",
1337
+ " <td>1.407400</td>\n",
1338
+ " </tr>\n",
1339
+ " <tr>\n",
1340
+ " <td>204</td>\n",
1341
+ " <td>1.406200</td>\n",
1342
+ " </tr>\n",
1343
+ " <tr>\n",
1344
+ " <td>205</td>\n",
1345
+ " <td>1.396100</td>\n",
1346
+ " </tr>\n",
1347
+ " <tr>\n",
1348
+ " <td>206</td>\n",
1349
+ " <td>1.407100</td>\n",
1350
+ " </tr>\n",
1351
+ " <tr>\n",
1352
+ " <td>207</td>\n",
1353
+ " <td>1.379600</td>\n",
1354
+ " </tr>\n",
1355
+ " <tr>\n",
1356
+ " <td>208</td>\n",
1357
+ " <td>1.360600</td>\n",
1358
+ " </tr>\n",
1359
+ " <tr>\n",
1360
+ " <td>209</td>\n",
1361
+ " <td>1.395100</td>\n",
1362
+ " </tr>\n",
1363
+ " <tr>\n",
1364
+ " <td>210</td>\n",
1365
+ " <td>1.352500</td>\n",
1366
+ " </tr>\n",
1367
+ " <tr>\n",
1368
+ " <td>211</td>\n",
1369
+ " <td>1.358900</td>\n",
1370
+ " </tr>\n",
1371
+ " <tr>\n",
1372
+ " <td>212</td>\n",
1373
+ " <td>1.369100</td>\n",
1374
+ " </tr>\n",
1375
+ " <tr>\n",
1376
+ " <td>213</td>\n",
1377
+ " <td>1.342600</td>\n",
1378
+ " </tr>\n",
1379
+ " <tr>\n",
1380
+ " <td>214</td>\n",
1381
+ " <td>1.358900</td>\n",
1382
+ " </tr>\n",
1383
+ " <tr>\n",
1384
+ " <td>215</td>\n",
1385
+ " <td>1.320300</td>\n",
1386
+ " </tr>\n",
1387
+ " <tr>\n",
1388
+ " <td>216</td>\n",
1389
+ " <td>1.355700</td>\n",
1390
+ " </tr>\n",
1391
+ " <tr>\n",
1392
+ " <td>217</td>\n",
1393
+ " <td>1.315700</td>\n",
1394
+ " </tr>\n",
1395
+ " <tr>\n",
1396
+ " <td>218</td>\n",
1397
+ " <td>1.348800</td>\n",
1398
+ " </tr>\n",
1399
+ " <tr>\n",
1400
+ " <td>219</td>\n",
1401
+ " <td>1.319800</td>\n",
1402
+ " </tr>\n",
1403
+ " <tr>\n",
1404
+ " <td>220</td>\n",
1405
+ " <td>1.336500</td>\n",
1406
+ " </tr>\n",
1407
+ " <tr>\n",
1408
+ " <td>221</td>\n",
1409
+ " <td>1.339600</td>\n",
1410
+ " </tr>\n",
1411
+ " <tr>\n",
1412
+ " <td>222</td>\n",
1413
+ " <td>1.319500</td>\n",
1414
+ " </tr>\n",
1415
+ " <tr>\n",
1416
+ " <td>223</td>\n",
1417
+ " <td>1.319600</td>\n",
1418
+ " </tr>\n",
1419
+ " <tr>\n",
1420
+ " <td>224</td>\n",
1421
+ " <td>1.330200</td>\n",
1422
+ " </tr>\n",
1423
+ " <tr>\n",
1424
+ " <td>225</td>\n",
1425
+ " <td>1.271700</td>\n",
1426
+ " </tr>\n",
1427
+ " <tr>\n",
1428
+ " <td>226</td>\n",
1429
+ " <td>1.317300</td>\n",
1430
+ " </tr>\n",
1431
+ " <tr>\n",
1432
+ " <td>227</td>\n",
1433
+ " <td>1.287400</td>\n",
1434
+ " </tr>\n",
1435
+ " <tr>\n",
1436
+ " <td>228</td>\n",
1437
+ " <td>1.283300</td>\n",
1438
+ " </tr>\n",
1439
+ " <tr>\n",
1440
+ " <td>229</td>\n",
1441
+ " <td>1.280500</td>\n",
1442
+ " </tr>\n",
1443
+ " <tr>\n",
1444
+ " <td>230</td>\n",
1445
+ " <td>1.274200</td>\n",
1446
+ " </tr>\n",
1447
+ " <tr>\n",
1448
+ " <td>231</td>\n",
1449
+ " <td>1.297000</td>\n",
1450
+ " </tr>\n",
1451
+ " <tr>\n",
1452
+ " <td>232</td>\n",
1453
+ " <td>1.266400</td>\n",
1454
+ " </tr>\n",
1455
+ " <tr>\n",
1456
+ " <td>233</td>\n",
1457
+ " <td>1.253100</td>\n",
1458
+ " </tr>\n",
1459
+ " <tr>\n",
1460
+ " <td>234</td>\n",
1461
+ " <td>1.273100</td>\n",
1462
+ " </tr>\n",
1463
+ " <tr>\n",
1464
+ " <td>235</td>\n",
1465
+ " <td>1.293300</td>\n",
1466
+ " </tr>\n",
1467
+ " <tr>\n",
1468
+ " <td>236</td>\n",
1469
+ " <td>1.293000</td>\n",
1470
+ " </tr>\n",
1471
+ " <tr>\n",
1472
+ " <td>237</td>\n",
1473
+ " <td>1.273500</td>\n",
1474
+ " </tr>\n",
1475
+ " <tr>\n",
1476
+ " <td>238</td>\n",
1477
+ " <td>1.253100</td>\n",
1478
+ " </tr>\n",
1479
+ " <tr>\n",
1480
+ " <td>239</td>\n",
1481
+ " <td>1.257700</td>\n",
1482
+ " </tr>\n",
1483
+ " <tr>\n",
1484
+ " <td>240</td>\n",
1485
+ " <td>1.232500</td>\n",
1486
+ " </tr>\n",
1487
+ " <tr>\n",
1488
+ " <td>241</td>\n",
1489
+ " <td>1.233100</td>\n",
1490
+ " </tr>\n",
1491
+ " <tr>\n",
1492
+ " <td>242</td>\n",
1493
+ " <td>1.226000</td>\n",
1494
+ " </tr>\n",
1495
+ " <tr>\n",
1496
+ " <td>243</td>\n",
1497
+ " <td>1.218400</td>\n",
1498
+ " </tr>\n",
1499
+ " <tr>\n",
1500
+ " <td>244</td>\n",
1501
+ " <td>1.222800</td>\n",
1502
+ " </tr>\n",
1503
+ " <tr>\n",
1504
+ " <td>245</td>\n",
1505
+ " <td>1.232100</td>\n",
1506
+ " </tr>\n",
1507
+ " <tr>\n",
1508
+ " <td>246</td>\n",
1509
+ " <td>1.214800</td>\n",
1510
+ " </tr>\n",
1511
+ " <tr>\n",
1512
+ " <td>247</td>\n",
1513
+ " <td>1.205700</td>\n",
1514
+ " </tr>\n",
1515
+ " <tr>\n",
1516
+ " <td>248</td>\n",
1517
+ " <td>1.228400</td>\n",
1518
+ " </tr>\n",
1519
+ " <tr>\n",
1520
+ " <td>249</td>\n",
1521
+ " <td>1.202600</td>\n",
1522
+ " </tr>\n",
1523
+ " <tr>\n",
1524
+ " <td>250</td>\n",
1525
+ " <td>1.207700</td>\n",
1526
+ " </tr>\n",
1527
+ " <tr>\n",
1528
+ " <td>251</td>\n",
1529
+ " <td>1.205800</td>\n",
1530
+ " </tr>\n",
1531
+ " <tr>\n",
1532
+ " <td>252</td>\n",
1533
+ " <td>1.198400</td>\n",
1534
+ " </tr>\n",
1535
+ " <tr>\n",
1536
+ " <td>253</td>\n",
1537
+ " <td>1.207800</td>\n",
1538
+ " </tr>\n",
1539
+ " <tr>\n",
1540
+ " <td>254</td>\n",
1541
+ " <td>1.198600</td>\n",
1542
+ " </tr>\n",
1543
+ " <tr>\n",
1544
+ " <td>255</td>\n",
1545
+ " <td>1.201700</td>\n",
1546
+ " </tr>\n",
1547
+ " <tr>\n",
1548
+ " <td>256</td>\n",
1549
+ " <td>1.195500</td>\n",
1550
+ " </tr>\n",
1551
+ " <tr>\n",
1552
+ " <td>257</td>\n",
1553
+ " <td>1.190500</td>\n",
1554
+ " </tr>\n",
1555
+ " <tr>\n",
1556
+ " <td>258</td>\n",
1557
+ " <td>1.197100</td>\n",
1558
+ " </tr>\n",
1559
+ " <tr>\n",
1560
+ " <td>259</td>\n",
1561
+ " <td>1.165100</td>\n",
1562
+ " </tr>\n",
1563
+ " <tr>\n",
1564
+ " <td>260</td>\n",
1565
+ " <td>1.173200</td>\n",
1566
+ " </tr>\n",
1567
+ " <tr>\n",
1568
+ " <td>261</td>\n",
1569
+ " <td>1.163400</td>\n",
1570
+ " </tr>\n",
1571
+ " <tr>\n",
1572
+ " <td>262</td>\n",
1573
+ " <td>1.191500</td>\n",
1574
+ " </tr>\n",
1575
+ " <tr>\n",
1576
+ " <td>263</td>\n",
1577
+ " <td>1.173700</td>\n",
1578
+ " </tr>\n",
1579
+ " <tr>\n",
1580
+ " <td>264</td>\n",
1581
+ " <td>1.134400</td>\n",
1582
+ " </tr>\n",
1583
+ " <tr>\n",
1584
+ " <td>265</td>\n",
1585
+ " <td>1.165500</td>\n",
1586
+ " </tr>\n",
1587
+ " <tr>\n",
1588
+ " <td>266</td>\n",
1589
+ " <td>1.134800</td>\n",
1590
+ " </tr>\n",
1591
+ " <tr>\n",
1592
+ " <td>267</td>\n",
1593
+ " <td>1.149500</td>\n",
1594
+ " </tr>\n",
1595
+ " <tr>\n",
1596
+ " <td>268</td>\n",
1597
+ " <td>1.173100</td>\n",
1598
+ " </tr>\n",
1599
+ " <tr>\n",
1600
+ " <td>269</td>\n",
1601
+ " <td>1.137000</td>\n",
1602
+ " </tr>\n",
1603
+ " <tr>\n",
1604
+ " <td>270</td>\n",
1605
+ " <td>1.171200</td>\n",
1606
+ " </tr>\n",
1607
+ " <tr>\n",
1608
+ " <td>271</td>\n",
1609
+ " <td>1.120600</td>\n",
1610
+ " </tr>\n",
1611
+ " <tr>\n",
1612
+ " <td>272</td>\n",
1613
+ " <td>1.147600</td>\n",
1614
+ " </tr>\n",
1615
+ " <tr>\n",
1616
+ " <td>273</td>\n",
1617
+ " <td>1.128300</td>\n",
1618
+ " </tr>\n",
1619
+ " <tr>\n",
1620
+ " <td>274</td>\n",
1621
+ " <td>1.150300</td>\n",
1622
+ " </tr>\n",
1623
+ " <tr>\n",
1624
+ " <td>275</td>\n",
1625
+ " <td>1.147700</td>\n",
1626
+ " </tr>\n",
1627
+ " <tr>\n",
1628
+ " <td>276</td>\n",
1629
+ " <td>1.150200</td>\n",
1630
+ " </tr>\n",
1631
+ " <tr>\n",
1632
+ " <td>277</td>\n",
1633
+ " <td>1.106900</td>\n",
1634
+ " </tr>\n",
1635
+ " <tr>\n",
1636
+ " <td>278</td>\n",
1637
+ " <td>1.145400</td>\n",
1638
+ " </tr>\n",
1639
+ " <tr>\n",
1640
+ " <td>279</td>\n",
1641
+ " <td>1.117300</td>\n",
1642
+ " </tr>\n",
1643
+ " <tr>\n",
1644
+ " <td>280</td>\n",
1645
+ " <td>1.121900</td>\n",
1646
+ " </tr>\n",
1647
+ " <tr>\n",
1648
+ " <td>281</td>\n",
1649
+ " <td>1.139400</td>\n",
1650
+ " </tr>\n",
1651
+ " <tr>\n",
1652
+ " <td>282</td>\n",
1653
+ " <td>1.109100</td>\n",
1654
+ " </tr>\n",
1655
+ " <tr>\n",
1656
+ " <td>283</td>\n",
1657
+ " <td>1.142100</td>\n",
1658
+ " </tr>\n",
1659
+ " <tr>\n",
1660
+ " <td>284</td>\n",
1661
+ " <td>1.117300</td>\n",
1662
+ " </tr>\n",
1663
+ " <tr>\n",
1664
+ " <td>285</td>\n",
1665
+ " <td>1.104200</td>\n",
1666
+ " </tr>\n",
1667
+ " <tr>\n",
1668
+ " <td>286</td>\n",
1669
+ " <td>1.134200</td>\n",
1670
+ " </tr>\n",
1671
+ " <tr>\n",
1672
+ " <td>287</td>\n",
1673
+ " <td>1.100400</td>\n",
1674
+ " </tr>\n",
1675
+ " <tr>\n",
1676
+ " <td>288</td>\n",
1677
+ " <td>1.092100</td>\n",
1678
+ " </tr>\n",
1679
+ " <tr>\n",
1680
+ " <td>289</td>\n",
1681
+ " <td>1.120500</td>\n",
1682
+ " </tr>\n",
1683
+ " <tr>\n",
1684
+ " <td>290</td>\n",
1685
+ " <td>1.088100</td>\n",
1686
+ " </tr>\n",
1687
+ " <tr>\n",
1688
+ " <td>291</td>\n",
1689
+ " <td>1.128600</td>\n",
1690
+ " </tr>\n",
1691
+ " <tr>\n",
1692
+ " <td>292</td>\n",
1693
+ " <td>1.105400</td>\n",
1694
+ " </tr>\n",
1695
+ " <tr>\n",
1696
+ " <td>293</td>\n",
1697
+ " <td>1.094000</td>\n",
1698
+ " </tr>\n",
1699
+ " <tr>\n",
1700
+ " <td>294</td>\n",
1701
+ " <td>1.108900</td>\n",
1702
+ " </tr>\n",
1703
+ " <tr>\n",
1704
+ " <td>295</td>\n",
1705
+ " <td>1.073100</td>\n",
1706
+ " </tr>\n",
1707
+ " <tr>\n",
1708
+ " <td>296</td>\n",
1709
+ " <td>1.100900</td>\n",
1710
+ " </tr>\n",
1711
+ " <tr>\n",
1712
+ " <td>297</td>\n",
1713
+ " <td>1.092400</td>\n",
1714
+ " </tr>\n",
1715
+ " <tr>\n",
1716
+ " <td>298</td>\n",
1717
+ " <td>1.090300</td>\n",
1718
+ " </tr>\n",
1719
+ " <tr>\n",
1720
+ " <td>299</td>\n",
1721
+ " <td>1.079400</td>\n",
1722
+ " </tr>\n",
1723
+ " <tr>\n",
1724
+ " <td>300</td>\n",
1725
+ " <td>1.090300</td>\n",
1726
+ " </tr>\n",
1727
+ " <tr>\n",
1728
+ " <td>301</td>\n",
1729
+ " <td>1.086100</td>\n",
1730
+ " </tr>\n",
1731
+ " <tr>\n",
1732
+ " <td>302</td>\n",
1733
+ " <td>1.080300</td>\n",
1734
+ " </tr>\n",
1735
+ " <tr>\n",
1736
+ " <td>303</td>\n",
1737
+ " <td>1.075600</td>\n",
1738
+ " </tr>\n",
1739
+ " <tr>\n",
1740
+ " <td>304</td>\n",
1741
+ " <td>1.075900</td>\n",
1742
+ " </tr>\n",
1743
+ " <tr>\n",
1744
+ " <td>305</td>\n",
1745
+ " <td>1.092200</td>\n",
1746
+ " </tr>\n",
1747
+ " <tr>\n",
1748
+ " <td>306</td>\n",
1749
+ " <td>1.070600</td>\n",
1750
+ " </tr>\n",
1751
+ " <tr>\n",
1752
+ " <td>307</td>\n",
1753
+ " <td>1.068800</td>\n",
1754
+ " </tr>\n",
1755
+ " <tr>\n",
1756
+ " <td>308</td>\n",
1757
+ " <td>1.071300</td>\n",
1758
+ " </tr>\n",
1759
+ " <tr>\n",
1760
+ " <td>309</td>\n",
1761
+ " <td>1.073900</td>\n",
1762
+ " </tr>\n",
1763
+ " <tr>\n",
1764
+ " <td>310</td>\n",
1765
+ " <td>1.055400</td>\n",
1766
+ " </tr>\n",
1767
+ " <tr>\n",
1768
+ " <td>311</td>\n",
1769
+ " <td>1.067900</td>\n",
1770
+ " </tr>\n",
1771
+ " <tr>\n",
1772
+ " <td>312</td>\n",
1773
+ " <td>1.041000</td>\n",
1774
+ " </tr>\n",
1775
+ " <tr>\n",
1776
+ " <td>313</td>\n",
1777
+ " <td>1.048600</td>\n",
1778
+ " </tr>\n",
1779
+ " <tr>\n",
1780
+ " <td>314</td>\n",
1781
+ " <td>1.072600</td>\n",
1782
+ " </tr>\n",
1783
+ " <tr>\n",
1784
+ " <td>315</td>\n",
1785
+ " <td>1.058800</td>\n",
1786
+ " </tr>\n",
1787
+ " <tr>\n",
1788
+ " <td>316</td>\n",
1789
+ " <td>1.039000</td>\n",
1790
+ " </tr>\n",
1791
+ " <tr>\n",
1792
+ " <td>317</td>\n",
1793
+ " <td>1.072300</td>\n",
1794
+ " </tr>\n",
1795
+ " <tr>\n",
1796
+ " <td>318</td>\n",
1797
+ " <td>1.056600</td>\n",
1798
+ " </tr>\n",
1799
+ " <tr>\n",
1800
+ " <td>319</td>\n",
1801
+ " <td>1.035100</td>\n",
1802
+ " </tr>\n",
1803
+ " <tr>\n",
1804
+ " <td>320</td>\n",
1805
+ " <td>1.052800</td>\n",
1806
+ " </tr>\n",
1807
+ " <tr>\n",
1808
+ " <td>321</td>\n",
1809
+ " <td>1.046700</td>\n",
1810
+ " </tr>\n",
1811
+ " <tr>\n",
1812
+ " <td>322</td>\n",
1813
+ " <td>1.073400</td>\n",
1814
+ " </tr>\n",
1815
+ " <tr>\n",
1816
+ " <td>323</td>\n",
1817
+ " <td>1.054000</td>\n",
1818
+ " </tr>\n",
1819
+ " <tr>\n",
1820
+ " <td>324</td>\n",
1821
+ " <td>1.077100</td>\n",
1822
+ " </tr>\n",
1823
+ " <tr>\n",
1824
+ " <td>325</td>\n",
1825
+ " <td>1.035200</td>\n",
1826
+ " </tr>\n",
1827
+ " <tr>\n",
1828
+ " <td>326</td>\n",
1829
+ " <td>1.027700</td>\n",
1830
+ " </tr>\n",
1831
+ " <tr>\n",
1832
+ " <td>327</td>\n",
1833
+ " <td>1.060000</td>\n",
1834
+ " </tr>\n",
1835
+ " <tr>\n",
1836
+ " <td>328</td>\n",
1837
+ " <td>1.048900</td>\n",
1838
+ " </tr>\n",
1839
+ " <tr>\n",
1840
+ " <td>329</td>\n",
1841
+ " <td>1.040000</td>\n",
1842
+ " </tr>\n",
1843
+ " <tr>\n",
1844
+ " <td>330</td>\n",
1845
+ " <td>1.026900</td>\n",
1846
+ " </tr>\n",
1847
+ " <tr>\n",
1848
+ " <td>331</td>\n",
1849
+ " <td>1.049300</td>\n",
1850
+ " </tr>\n",
1851
+ " <tr>\n",
1852
+ " <td>332</td>\n",
1853
+ " <td>1.017100</td>\n",
1854
+ " </tr>\n",
1855
+ " <tr>\n",
1856
+ " <td>333</td>\n",
1857
+ " <td>0.996200</td>\n",
1858
+ " </tr>\n",
1859
+ " <tr>\n",
1860
+ " <td>334</td>\n",
1861
+ " <td>1.006400</td>\n",
1862
+ " </tr>\n",
1863
+ " <tr>\n",
1864
+ " <td>335</td>\n",
1865
+ " <td>1.026700</td>\n",
1866
+ " </tr>\n",
1867
+ " <tr>\n",
1868
+ " <td>336</td>\n",
1869
+ " <td>1.073700</td>\n",
1870
+ " </tr>\n",
1871
+ " <tr>\n",
1872
+ " <td>337</td>\n",
1873
+ " <td>1.039200</td>\n",
1874
+ " </tr>\n",
1875
+ " <tr>\n",
1876
+ " <td>338</td>\n",
1877
+ " <td>1.041100</td>\n",
1878
+ " </tr>\n",
1879
+ " <tr>\n",
1880
+ " <td>339</td>\n",
1881
+ " <td>1.054300</td>\n",
1882
+ " </tr>\n",
1883
+ " <tr>\n",
1884
+ " <td>340</td>\n",
1885
+ " <td>1.013500</td>\n",
1886
+ " </tr>\n",
1887
+ " <tr>\n",
1888
+ " <td>341</td>\n",
1889
+ " <td>1.024900</td>\n",
1890
+ " </tr>\n",
1891
+ " <tr>\n",
1892
+ " <td>342</td>\n",
1893
+ " <td>1.003300</td>\n",
1894
+ " </tr>\n",
1895
+ " <tr>\n",
1896
+ " <td>343</td>\n",
1897
+ " <td>0.993400</td>\n",
1898
+ " </tr>\n",
1899
+ " <tr>\n",
1900
+ " <td>344</td>\n",
1901
+ " <td>1.037300</td>\n",
1902
+ " </tr>\n",
1903
+ " <tr>\n",
1904
+ " <td>345</td>\n",
1905
+ " <td>1.009300</td>\n",
1906
+ " </tr>\n",
1907
+ " <tr>\n",
1908
+ " <td>346</td>\n",
1909
+ " <td>1.030400</td>\n",
1910
+ " </tr>\n",
1911
+ " <tr>\n",
1912
+ " <td>347</td>\n",
1913
+ " <td>1.001400</td>\n",
1914
+ " </tr>\n",
1915
+ " <tr>\n",
1916
+ " <td>348</td>\n",
1917
+ " <td>1.012100</td>\n",
1918
+ " </tr>\n",
1919
+ " <tr>\n",
1920
+ " <td>349</td>\n",
1921
+ " <td>1.027300</td>\n",
1922
+ " </tr>\n",
1923
+ " <tr>\n",
1924
+ " <td>350</td>\n",
1925
+ " <td>1.012700</td>\n",
1926
+ " </tr>\n",
1927
+ " <tr>\n",
1928
+ " <td>351</td>\n",
1929
+ " <td>1.013400</td>\n",
1930
+ " </tr>\n",
1931
+ " <tr>\n",
1932
+ " <td>352</td>\n",
1933
+ " <td>1.004400</td>\n",
1934
+ " </tr>\n",
1935
+ " <tr>\n",
1936
+ " <td>353</td>\n",
1937
+ " <td>1.024800</td>\n",
1938
+ " </tr>\n",
1939
+ " <tr>\n",
1940
+ " <td>354</td>\n",
1941
+ " <td>0.990700</td>\n",
1942
+ " </tr>\n",
1943
+ " <tr>\n",
1944
+ " <td>355</td>\n",
1945
+ " <td>1.048600</td>\n",
1946
+ " </tr>\n",
1947
+ " <tr>\n",
1948
+ " <td>356</td>\n",
1949
+ " <td>0.992700</td>\n",
1950
+ " </tr>\n",
1951
+ " <tr>\n",
1952
+ " <td>357</td>\n",
1953
+ " <td>0.991800</td>\n",
1954
+ " </tr>\n",
1955
+ " <tr>\n",
1956
+ " <td>358</td>\n",
1957
+ " <td>0.985300</td>\n",
1958
+ " </tr>\n",
1959
+ " <tr>\n",
1960
+ " <td>359</td>\n",
1961
+ " <td>1.019100</td>\n",
1962
+ " </tr>\n",
1963
+ " <tr>\n",
1964
+ " <td>360</td>\n",
1965
+ " <td>1.007300</td>\n",
1966
+ " </tr>\n",
1967
+ " <tr>\n",
1968
+ " <td>361</td>\n",
1969
+ " <td>1.025500</td>\n",
1970
+ " </tr>\n",
1971
+ " <tr>\n",
1972
+ " <td>362</td>\n",
1973
+ " <td>0.999100</td>\n",
1974
+ " </tr>\n",
1975
+ " <tr>\n",
1976
+ " <td>363</td>\n",
1977
+ " <td>0.997900</td>\n",
1978
+ " </tr>\n",
1979
+ " <tr>\n",
1980
+ " <td>364</td>\n",
1981
+ " <td>1.013300</td>\n",
1982
+ " </tr>\n",
1983
+ " <tr>\n",
1984
+ " <td>365</td>\n",
1985
+ " <td>1.014700</td>\n",
1986
+ " </tr>\n",
1987
+ " <tr>\n",
1988
+ " <td>366</td>\n",
1989
+ " <td>1.037700</td>\n",
1990
+ " </tr>\n",
1991
+ " <tr>\n",
1992
+ " <td>367</td>\n",
1993
+ " <td>0.992400</td>\n",
1994
+ " </tr>\n",
1995
+ " <tr>\n",
1996
+ " <td>368</td>\n",
1997
+ " <td>0.988800</td>\n",
1998
+ " </tr>\n",
1999
+ " <tr>\n",
2000
+ " <td>369</td>\n",
2001
+ " <td>0.993900</td>\n",
2002
+ " </tr>\n",
2003
+ " <tr>\n",
2004
+ " <td>370</td>\n",
2005
+ " <td>0.999500</td>\n",
2006
+ " </tr>\n",
2007
+ " <tr>\n",
2008
+ " <td>371</td>\n",
2009
+ " <td>0.973000</td>\n",
2010
+ " </tr>\n",
2011
+ " <tr>\n",
2012
+ " <td>372</td>\n",
2013
+ " <td>0.972200</td>\n",
2014
+ " </tr>\n",
2015
+ " <tr>\n",
2016
+ " <td>373</td>\n",
2017
+ " <td>0.989200</td>\n",
2018
+ " </tr>\n",
2019
+ " <tr>\n",
2020
+ " <td>374</td>\n",
2021
+ " <td>0.994500</td>\n",
2022
+ " </tr>\n",
2023
+ " <tr>\n",
2024
+ " <td>375</td>\n",
2025
+ " <td>0.995800</td>\n",
2026
+ " </tr>\n",
2027
+ " <tr>\n",
2028
+ " <td>376</td>\n",
2029
+ " <td>0.992000</td>\n",
2030
+ " </tr>\n",
2031
+ " <tr>\n",
2032
+ " <td>377</td>\n",
2033
+ " <td>0.977800</td>\n",
2034
+ " </tr>\n",
2035
+ " <tr>\n",
2036
+ " <td>378</td>\n",
2037
+ " <td>0.975700</td>\n",
2038
+ " </tr>\n",
2039
+ " <tr>\n",
2040
+ " <td>379</td>\n",
2041
+ " <td>0.973700</td>\n",
2042
+ " </tr>\n",
2043
+ " <tr>\n",
2044
+ " <td>380</td>\n",
2045
+ " <td>0.986200</td>\n",
2046
+ " </tr>\n",
2047
+ " <tr>\n",
2048
+ " <td>381</td>\n",
2049
+ " <td>1.008000</td>\n",
2050
+ " </tr>\n",
2051
+ " <tr>\n",
2052
+ " <td>382</td>\n",
2053
+ " <td>0.954100</td>\n",
2054
+ " </tr>\n",
2055
+ " <tr>\n",
2056
+ " <td>383</td>\n",
2057
+ " <td>1.015900</td>\n",
2058
+ " </tr>\n",
2059
+ " <tr>\n",
2060
+ " <td>384</td>\n",
2061
+ " <td>1.008200</td>\n",
2062
+ " </tr>\n",
2063
+ " <tr>\n",
2064
+ " <td>385</td>\n",
2065
+ " <td>0.974700</td>\n",
2066
+ " </tr>\n",
2067
+ " <tr>\n",
2068
+ " <td>386</td>\n",
2069
+ " <td>0.987500</td>\n",
2070
+ " </tr>\n",
2071
+ " <tr>\n",
2072
+ " <td>387</td>\n",
2073
+ " <td>0.993700</td>\n",
2074
+ " </tr>\n",
2075
+ " <tr>\n",
2076
+ " <td>388</td>\n",
2077
+ " <td>0.999200</td>\n",
2078
+ " </tr>\n",
2079
+ " <tr>\n",
2080
+ " <td>389</td>\n",
2081
+ " <td>1.000700</td>\n",
2082
+ " </tr>\n",
2083
+ " <tr>\n",
2084
+ " <td>390</td>\n",
2085
+ " <td>0.978600</td>\n",
2086
+ " </tr>\n",
2087
+ " <tr>\n",
2088
+ " <td>391</td>\n",
2089
+ " <td>0.956200</td>\n",
2090
+ " </tr>\n",
2091
+ " <tr>\n",
2092
+ " <td>392</td>\n",
2093
+ " <td>1.001600</td>\n",
2094
+ " </tr>\n",
2095
+ " <tr>\n",
2096
+ " <td>393</td>\n",
2097
+ " <td>0.971300</td>\n",
2098
+ " </tr>\n",
2099
+ " <tr>\n",
2100
+ " <td>394</td>\n",
2101
+ " <td>0.965800</td>\n",
2102
+ " </tr>\n",
2103
+ " <tr>\n",
2104
+ " <td>395</td>\n",
2105
+ " <td>0.981000</td>\n",
2106
+ " </tr>\n",
2107
+ " <tr>\n",
2108
+ " <td>396</td>\n",
2109
+ " <td>0.965400</td>\n",
2110
+ " </tr>\n",
2111
+ " <tr>\n",
2112
+ " <td>397</td>\n",
2113
+ " <td>0.974200</td>\n",
2114
+ " </tr>\n",
2115
+ " <tr>\n",
2116
+ " <td>398</td>\n",
2117
+ " <td>0.970700</td>\n",
2118
+ " </tr>\n",
2119
+ " <tr>\n",
2120
+ " <td>399</td>\n",
2121
+ " <td>0.953500</td>\n",
2122
+ " </tr>\n",
2123
+ " <tr>\n",
2124
+ " <td>400</td>\n",
2125
+ " <td>0.979700</td>\n",
2126
+ " </tr>\n",
2127
+ " <tr>\n",
2128
+ " <td>401</td>\n",
2129
+ " <td>0.957700</td>\n",
2130
+ " </tr>\n",
2131
+ " <tr>\n",
2132
+ " <td>402</td>\n",
2133
+ " <td>0.984600</td>\n",
2134
+ " </tr>\n",
2135
+ " <tr>\n",
2136
+ " <td>403</td>\n",
2137
+ " <td>1.015600</td>\n",
2138
+ " </tr>\n",
2139
+ " <tr>\n",
2140
+ " <td>404</td>\n",
2141
+ " <td>0.976800</td>\n",
2142
+ " </tr>\n",
2143
+ " <tr>\n",
2144
+ " <td>405</td>\n",
2145
+ " <td>0.969100</td>\n",
2146
+ " </tr>\n",
2147
+ " <tr>\n",
2148
+ " <td>406</td>\n",
2149
+ " <td>0.974200</td>\n",
2150
+ " </tr>\n",
2151
+ " <tr>\n",
2152
+ " <td>407</td>\n",
2153
+ " <td>0.983300</td>\n",
2154
+ " </tr>\n",
2155
+ " <tr>\n",
2156
+ " <td>408</td>\n",
2157
+ " <td>0.974300</td>\n",
2158
+ " </tr>\n",
2159
+ " <tr>\n",
2160
+ " <td>409</td>\n",
2161
+ " <td>0.980600</td>\n",
2162
+ " </tr>\n",
2163
+ " <tr>\n",
2164
+ " <td>410</td>\n",
2165
+ " <td>0.986300</td>\n",
2166
+ " </tr>\n",
2167
+ " <tr>\n",
2168
+ " <td>411</td>\n",
2169
+ " <td>0.968100</td>\n",
2170
+ " </tr>\n",
2171
+ " <tr>\n",
2172
+ " <td>412</td>\n",
2173
+ " <td>0.980500</td>\n",
2174
+ " </tr>\n",
2175
+ " <tr>\n",
2176
+ " <td>413</td>\n",
2177
+ " <td>0.976200</td>\n",
2178
+ " </tr>\n",
2179
+ " <tr>\n",
2180
+ " <td>414</td>\n",
2181
+ " <td>0.987300</td>\n",
2182
+ " </tr>\n",
2183
+ " <tr>\n",
2184
+ " <td>415</td>\n",
2185
+ " <td>0.971600</td>\n",
2186
+ " </tr>\n",
2187
+ " <tr>\n",
2188
+ " <td>416</td>\n",
2189
+ " <td>0.985200</td>\n",
2190
+ " </tr>\n",
2191
+ " <tr>\n",
2192
+ " <td>417</td>\n",
2193
+ " <td>0.989800</td>\n",
2194
+ " </tr>\n",
2195
+ " <tr>\n",
2196
+ " <td>418</td>\n",
2197
+ " <td>0.972000</td>\n",
2198
+ " </tr>\n",
2199
+ " <tr>\n",
2200
+ " <td>419</td>\n",
2201
+ " <td>0.971100</td>\n",
2202
+ " </tr>\n",
2203
+ " <tr>\n",
2204
+ " <td>420</td>\n",
2205
+ " <td>0.988800</td>\n",
2206
+ " </tr>\n",
2207
+ " <tr>\n",
2208
+ " <td>421</td>\n",
2209
+ " <td>0.965600</td>\n",
2210
+ " </tr>\n",
2211
+ " <tr>\n",
2212
+ " <td>422</td>\n",
2213
+ " <td>1.020400</td>\n",
2214
+ " </tr>\n",
2215
+ " <tr>\n",
2216
+ " <td>423</td>\n",
2217
+ " <td>0.978000</td>\n",
2218
+ " </tr>\n",
2219
+ " <tr>\n",
2220
+ " <td>424</td>\n",
2221
+ " <td>0.987800</td>\n",
2222
+ " </tr>\n",
2223
+ " <tr>\n",
2224
+ " <td>425</td>\n",
2225
+ " <td>0.953700</td>\n",
2226
+ " </tr>\n",
2227
+ " <tr>\n",
2228
+ " <td>426</td>\n",
2229
+ " <td>0.990400</td>\n",
2230
+ " </tr>\n",
2231
+ " <tr>\n",
2232
+ " <td>427</td>\n",
2233
+ " <td>0.982900</td>\n",
2234
+ " </tr>\n",
2235
+ " <tr>\n",
2236
+ " <td>428</td>\n",
2237
+ " <td>0.989100</td>\n",
2238
+ " </tr>\n",
2239
+ " <tr>\n",
2240
+ " <td>429</td>\n",
2241
+ " <td>0.983800</td>\n",
2242
+ " </tr>\n",
2243
+ " <tr>\n",
2244
+ " <td>430</td>\n",
2245
+ " <td>0.981500</td>\n",
2246
+ " </tr>\n",
2247
+ " <tr>\n",
2248
+ " <td>431</td>\n",
2249
+ " <td>0.966900</td>\n",
2250
+ " </tr>\n",
2251
+ " <tr>\n",
2252
+ " <td>432</td>\n",
2253
+ " <td>0.967300</td>\n",
2254
+ " </tr>\n",
2255
+ " <tr>\n",
2256
+ " <td>433</td>\n",
2257
+ " <td>0.999400</td>\n",
2258
+ " </tr>\n",
2259
+ " <tr>\n",
2260
+ " <td>434</td>\n",
2261
+ " <td>0.973100</td>\n",
2262
+ " </tr>\n",
2263
+ " <tr>\n",
2264
+ " <td>435</td>\n",
2265
+ " <td>0.980500</td>\n",
2266
+ " </tr>\n",
2267
+ " <tr>\n",
2268
+ " <td>436</td>\n",
2269
+ " <td>0.995500</td>\n",
2270
+ " </tr>\n",
2271
+ " <tr>\n",
2272
+ " <td>437</td>\n",
2273
+ " <td>0.960300</td>\n",
2274
+ " </tr>\n",
2275
+ " <tr>\n",
2276
+ " <td>438</td>\n",
2277
+ " <td>0.953700</td>\n",
2278
+ " </tr>\n",
2279
+ " <tr>\n",
2280
+ " <td>439</td>\n",
2281
+ " <td>0.993600</td>\n",
2282
+ " </tr>\n",
2283
+ " <tr>\n",
2284
+ " <td>440</td>\n",
2285
+ " <td>0.965100</td>\n",
2286
+ " </tr>\n",
2287
+ " </tbody>\n",
2288
+ "</table><p>"
2289
+ ],
2290
+ "text/plain": [
2291
+ "<IPython.core.display.HTML object>"
2292
+ ]
2293
+ },
2294
+ "metadata": {},
2295
+ "output_type": "display_data"
2296
  }
2297
  ],
2298
  "source": [
2299
+ "\n",
2300
+ "\n",
2301
  "trainer = transformers.Trainer(\n",
2302
+ " model = model,\n",
2303
  " train_dataset = data,\n",
2304
  " args = targs,\n",
2305
  " data_collator=transformers.DataCollatorForLanguageModeling(tokenizer, mlm=False)\n",
res.txt ADDED
@@ -0,0 +1,515 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.
3
+ ### Question: How many games does novica veličković have when there's more than 24 rebounds?
4
+ ### Input: Table 2-16050349-8 has columns Rank (real),Name (text),Team (text),Games (real),Rebounds (real).
5
+ ### Answer: SELECT COUNT Games FROM 2-16050349-8 WHERE Name = 'novica veličković' AND Rebounds > 24
6
+
7
+ Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.
8
+ ### Question: What is the number of capacity at somerset park?
9
+ ### Input: Table 1-11206787-5 has columns Team (text),Stadium (text),Capacity (real),Highest (real),Lowest (real),Average (real).
10
+ ### Answer: SELECT COUNT Capacity FROM 1-11206787-5 WHERE Stadium = 'Somerset Park'
11
+
12
+ Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.
13
+ ### Question: What is the number & name with an Undergoing overhaul, restoration or repairs date?
14
+ ### Input: Table 2-11913905-6 has columns Number & Name (text),Description (text),Livery (text),Owner(s) (text),Date (text).
15
+ ### Answer: SELECT Number & Name FROM 2-11913905-6 WHERE Date = 'undergoing overhaul, restoration or repairs'
16
+
17
+ Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.
18
+ ### Question: What year did Orlando have a School/Club team in Clemson?
19
+ ### Input: Table 2-15621965-7 has columns Player (text),Nationality (text),Position (text),Years in Orlando (text),School/Club Team (text).
20
+ ### Answer: SELECT Years in Orlando FROM 2-15621965-7 WHERE School/Club Team = 'clemson'
21
+
22
+ Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.
23
+ ### Question: How many Deaths have a Fate of damaged, and a Tonnage (GRT) smaller than 4,917?
24
+ ### Input: Table 2-18914307-1 has columns Date (text),Ship Name (text),Flag (text),Tonnage ( GRT ) (real),Fate (text),Deaths (real).
25
+ ### Answer: SELECT COUNT Deaths FROM 2-18914307-1 WHERE Fate = 'damaged' AND Tonnage ( GRT ) < 4,917
26
+
27
+
28
+
29
+
30
+
31
+ {'phase': 1, 'table_id': '1-1000181-1', 'question': 'Tell me what the notes are for South Australia ', 'sql': {'sel': 5, 'conds': [[3, 0, 'SOUTH AUSTRALIA']], 'agg': 0}}
32
+
33
+ 1-1000181-1
34
+ ['State/territory', 'Text/background colour', 'Format', 'Current slogan', 'Current series', 'Notes']
35
+
36
+ {'id': '1-1000181-1', 'header': ['State/territory', 'Text/background colour', 'Format', 'Current slogan', 'Current series', 'Notes'], 'types': ['text', 'text', 'text', 'text', 'text', 'text'], 'rows': [['Australian Capital Territory', 'blue/white', 'Yaa·nna', 'ACT · CELEBRATION OF A CENTURY 2013', 'YIL·00A', 'Slogan screenprinted on plate'], ['New South Wales', 'black/yellow', 'aa·nn·aa', 'NEW SOUTH WALES', 'BX·99·HI', 'No slogan on current series'], ['New South Wales', 'black/white', 'aaa·nna', 'NSW', 'CPX·12A', 'Optional white slimline series'], ['Northern Territory', 'ochre/white', 'Ca·nn·aa', 'NT · OUTBACK AUSTRALIA', 'CB·06·ZZ', 'New series began in June 2011'], ['Queensland', 'maroon/white', 'nnn·aaa', 'QUEENSLAND · SUNSHINE STATE', '999·TLG', 'Slogan embossed on plate'], ['South Australia', 'black/white', 'Snnn·aaa', 'SOUTH AUSTRALIA', 'S000·AZD', 'No slogan on current series'], ['Victoria', 'blue/white', 'aaa·nnn', 'VICTORIA - THE PLACE TO BE', 'ZZZ·562', 'Current series will be exhausted this year']], 'name': 'table_1000181_1'}
37
+ SELECT col5 FROM table WHERE col3 = SOUTH AUSTRALIA
38
+ SELECT Notes FROM table WHERE Current slogan = SOUTH AUSTRALIA
39
+ fatal: destination path 'WikiSQL' already exists and is not an empty directory.
40
+ data/
41
+ data/train.jsonl
42
+ data/test.tables.jsonl
43
+ data/test.db
44
+ data/dev.tables.jsonl
45
+ data/dev.db
46
+ data/test.jsonl
47
+ data/train.tables.jsonl
48
+ data/train.db
49
+ data/dev.jsonl
50
+ /home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/generation/utils.py:1220: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. Please use a generation configuration file (see https://huggingface.co/docs/transformers/main_classes/text_generation)
51
+ "You have modified the pretrained model configuration to control generation. This is a"
52
+ ⁇ hey dude, talk to me.
53
+ I'm a 20 year old guy from the UK. I'm a bit of a nerd, I like to read, I like to write, I like to play video games, I like to watch movies, I like to listen
54
+ /home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/cuda_setup/main.py:136: UserWarning: /opt/conda did not contain libcudart.so as expected! Searching further paths...
55
+ warn(msg)
56
+ The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization.
57
+ The tokenizer class you load from this checkpoint is 'LLaMATokenizer'.
58
+ The class this function is called from is 'LlamaTokenizer'.
59
+
60
+ ===================================BUG REPORT===================================
61
+ Welcome to bitsandbytes. For bug reports, please submit your error trace to: https://github.com/TimDettmers/bitsandbytes/issues
62
+ ================================================================================
63
+ CUDA SETUP: CUDA runtime path found: /usr/local/cuda/lib64/libcudart.so
64
+ CUDA SETUP: Highest compute capability among GPUs detected: 7.5
65
+ CUDA SETUP: Detected CUDA version 113
66
+ CUDA SETUP: Loading binary /home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/libbitsandbytes_cuda113.so...
67
+ True
68
+
69
+ [440/440 11:19:07, Epoch 0/1]
70
+ Step Training Loss
71
+ 1 2.517200
72
+ 2 2.482300
73
+ 3 2.444100
74
+ 4 2.456500
75
+ 5 2.441400
76
+ 6 2.484600
77
+ 7 2.424000
78
+ 8 2.477900
79
+ 9 2.429700
80
+ 10 2.436000
81
+ 11 2.422000
82
+ 12 2.408800
83
+ 13 2.402900
84
+ 14 2.424500
85
+ 15 2.421800
86
+ 16 2.424100
87
+ 17 2.404000
88
+ 18 2.386900
89
+ 19 2.414400
90
+ 20 2.370600
91
+ 21 2.382500
92
+ 22 2.350700
93
+ 23 2.385700
94
+ 24 2.350400
95
+ 25 2.354900
96
+ 26 2.345400
97
+ 27 2.373000
98
+ 28 2.343200
99
+ 29 2.374300
100
+ 30 2.325000
101
+ 31 2.352000
102
+ 32 2.344600
103
+ 33 2.360000
104
+ 34 2.347400
105
+ 35 2.346700
106
+ 36 2.329000
107
+ 37 2.314600
108
+ 38 2.306000
109
+ 39 2.292600
110
+ 40 2.333800
111
+ 41 2.311500
112
+ 42 2.308300
113
+ 43 2.287400
114
+ 44 2.314100
115
+ 45 2.280400
116
+ 46 2.261300
117
+ 47 2.274200
118
+ 48 2.246900
119
+ 49 2.257100
120
+ 50 2.274500
121
+ 51 2.245500
122
+ 52 2.250700
123
+ 53 2.296600
124
+ 54 2.261000
125
+ 55 2.223800
126
+ 56 2.244000
127
+ 57 2.228500
128
+ 58 2.229100
129
+ 59 2.162300
130
+ 60 2.238000
131
+ 61 2.246000
132
+ 62 2.184800
133
+ 63 2.195000
134
+ 64 2.199500
135
+ 65 2.180000
136
+ 66 2.179800
137
+ 67 2.149700
138
+ 68 2.177000
139
+ 69 2.156600
140
+ 70 2.193400
141
+ 71 2.163400
142
+ 72 2.147400
143
+ 73 2.134700
144
+ 74 2.133200
145
+ 75 2.118000
146
+ 76 2.139000
147
+ 77 2.102000
148
+ 78 2.109100
149
+ 79 2.099000
150
+ 80 2.097500
151
+ 81 2.073200
152
+ 82 2.055200
153
+ 83 2.078100
154
+ 84 2.104800
155
+ 85 2.061100
156
+ 86 2.066500
157
+ 87 2.073500
158
+ 88 2.010500
159
+ 89 2.045700
160
+ 90 2.026700
161
+ 91 2.046500
162
+ 92 2.015300
163
+ 93 2.019100
164
+ 94 2.008600
165
+ 95 1.961000
166
+ 96 1.974300
167
+ 97 1.991700
168
+ 98 1.984700
169
+ 99 1.975900
170
+ 100 1.963900
171
+ 101 1.934300
172
+ 102 1.990400
173
+ 103 1.914900
174
+ 104 1.956100
175
+ 105 1.943400
176
+ 106 1.931000
177
+ 107 1.919000
178
+ 108 1.912800
179
+ 109 1.920400
180
+ 110 1.878300
181
+ 111 1.890800
182
+ 112 1.881900
183
+ 113 1.885400
184
+ 114 1.908400
185
+ 115 1.871200
186
+ 116 1.900000
187
+ 117 1.888000
188
+ 118 1.875100
189
+ 119 1.855000
190
+ 120 1.852100
191
+ 121 1.851200
192
+ 122 1.821800
193
+ 123 1.853000
194
+ 124 1.854700
195
+ 125 1.806900
196
+ 126 1.845300
197
+ 127 1.797800
198
+ 128 1.795300
199
+ 129 1.799500
200
+ 130 1.853900
201
+ 131 1.780100
202
+ 132 1.789400
203
+ 133 1.776700
204
+ 134 1.747300
205
+ 135 1.753700
206
+ 136 1.761300
207
+ 137 1.725500
208
+ 138 1.710800
209
+ 139 1.733500
210
+ 140 1.727000
211
+ 141 1.744300
212
+ 142 1.728900
213
+ 143 1.725100
214
+ 144 1.708000
215
+ 145 1.709000
216
+ 146 1.704600
217
+ 147 1.684600
218
+ 148 1.676100
219
+ 149 1.682800
220
+ 150 1.669900
221
+ 151 1.636400
222
+ 152 1.671500
223
+ 153 1.673200
224
+ 154 1.644300
225
+ 155 1.620800
226
+ 156 1.617500
227
+ 157 1.647700
228
+ 158 1.629300
229
+ 159 1.608800
230
+ 160 1.633000
231
+ 161 1.618200
232
+ 162 1.634300
233
+ 163 1.588400
234
+ 164 1.581100
235
+ 165 1.584500
236
+ 166 1.594800
237
+ 167 1.563800
238
+ 168 1.576900
239
+ 169 1.546300
240
+ 170 1.569800
241
+ 171 1.592300
242
+ 172 1.537800
243
+ 173 1.519200
244
+ 174 1.512100
245
+ 175 1.581500
246
+ 176 1.534500
247
+ 177 1.509400
248
+ 178 1.521300
249
+ 179 1.528500
250
+ 180 1.494300
251
+ 181 1.495000
252
+ 182 1.499700
253
+ 183 1.461300
254
+ 184 1.469200
255
+ 185 1.495200
256
+ 186 1.467400
257
+ 187 1.437000
258
+ 188 1.463000
259
+ 189 1.437900
260
+ 190 1.467400
261
+ 191 1.472300
262
+ 192 1.434000
263
+ 193 1.411500
264
+ 194 1.432500
265
+ 195 1.459800
266
+ 196 1.431900
267
+ 197 1.456200
268
+ 198 1.394800
269
+ 199 1.422700
270
+ 200 1.412800
271
+ 201 1.413800
272
+ 202 1.380000
273
+ 203 1.407400
274
+ 204 1.406200
275
+ 205 1.396100
276
+ 206 1.407100
277
+ 207 1.379600
278
+ 208 1.360600
279
+ 209 1.395100
280
+ 210 1.352500
281
+ 211 1.358900
282
+ 212 1.369100
283
+ 213 1.342600
284
+ 214 1.358900
285
+ 215 1.320300
286
+ 216 1.355700
287
+ 217 1.315700
288
+ 218 1.348800
289
+ 219 1.319800
290
+ 220 1.336500
291
+ 221 1.339600
292
+ 222 1.319500
293
+ 223 1.319600
294
+ 224 1.330200
295
+ 225 1.271700
296
+ 226 1.317300
297
+ 227 1.287400
298
+ 228 1.283300
299
+ 229 1.280500
300
+ 230 1.274200
301
+ 231 1.297000
302
+ 232 1.266400
303
+ 233 1.253100
304
+ 234 1.273100
305
+ 235 1.293300
306
+ 236 1.293000
307
+ 237 1.273500
308
+ 238 1.253100
309
+ 239 1.257700
310
+ 240 1.232500
311
+ 241 1.233100
312
+ 242 1.226000
313
+ 243 1.218400
314
+ 244 1.222800
315
+ 245 1.232100
316
+ 246 1.214800
317
+ 247 1.205700
318
+ 248 1.228400
319
+ 249 1.202600
320
+ 250 1.207700
321
+ 251 1.205800
322
+ 252 1.198400
323
+ 253 1.207800
324
+ 254 1.198600
325
+ 255 1.201700
326
+ 256 1.195500
327
+ 257 1.190500
328
+ 258 1.197100
329
+ 259 1.165100
330
+ 260 1.173200
331
+ 261 1.163400
332
+ 262 1.191500
333
+ 263 1.173700
334
+ 264 1.134400
335
+ 265 1.165500
336
+ 266 1.134800
337
+ 267 1.149500
338
+ 268 1.173100
339
+ 269 1.137000
340
+ 270 1.171200
341
+ 271 1.120600
342
+ 272 1.147600
343
+ 273 1.128300
344
+ 274 1.150300
345
+ 275 1.147700
346
+ 276 1.150200
347
+ 277 1.106900
348
+ 278 1.145400
349
+ 279 1.117300
350
+ 280 1.121900
351
+ 281 1.139400
352
+ 282 1.109100
353
+ 283 1.142100
354
+ 284 1.117300
355
+ 285 1.104200
356
+ 286 1.134200
357
+ 287 1.100400
358
+ 288 1.092100
359
+ 289 1.120500
360
+ 290 1.088100
361
+ 291 1.128600
362
+ 292 1.105400
363
+ 293 1.094000
364
+ 294 1.108900
365
+ 295 1.073100
366
+ 296 1.100900
367
+ 297 1.092400
368
+ 298 1.090300
369
+ 299 1.079400
370
+ 300 1.090300
371
+ 301 1.086100
372
+ 302 1.080300
373
+ 303 1.075600
374
+ 304 1.075900
375
+ 305 1.092200
376
+ 306 1.070600
377
+ 307 1.068800
378
+ 308 1.071300
379
+ 309 1.073900
380
+ 310 1.055400
381
+ 311 1.067900
382
+ 312 1.041000
383
+ 313 1.048600
384
+ 314 1.072600
385
+ 315 1.058800
386
+ 316 1.039000
387
+ 317 1.072300
388
+ 318 1.056600
389
+ 319 1.035100
390
+ 320 1.052800
391
+ 321 1.046700
392
+ 322 1.073400
393
+ 323 1.054000
394
+ 324 1.077100
395
+ 325 1.035200
396
+ 326 1.027700
397
+ 327 1.060000
398
+ 328 1.048900
399
+ 329 1.040000
400
+ 330 1.026900
401
+ 331 1.049300
402
+ 332 1.017100
403
+ 333 0.996200
404
+ 334 1.006400
405
+ 335 1.026700
406
+ 336 1.073700
407
+ 337 1.039200
408
+ 338 1.041100
409
+ 339 1.054300
410
+ 340 1.013500
411
+ 341 1.024900
412
+ 342 1.003300
413
+ 343 0.993400
414
+ 344 1.037300
415
+ 345 1.009300
416
+ 346 1.030400
417
+ 347 1.001400
418
+ 348 1.012100
419
+ 349 1.027300
420
+ 350 1.012700
421
+ 351 1.013400
422
+ 352 1.004400
423
+ 353 1.024800
424
+ 354 0.990700
425
+ 355 1.048600
426
+ 356 0.992700
427
+ 357 0.991800
428
+ 358 0.985300
429
+ 359 1.019100
430
+ 360 1.007300
431
+ 361 1.025500
432
+ 362 0.999100
433
+ 363 0.997900
434
+ 364 1.013300
435
+ 365 1.014700
436
+ 366 1.037700
437
+ 367 0.992400
438
+ 368 0.988800
439
+ 369 0.993900
440
+ 370 0.999500
441
+ 371 0.973000
442
+ 372 0.972200
443
+ 373 0.989200
444
+ 374 0.994500
445
+ 375 0.995800
446
+ 376 0.992000
447
+ 377 0.977800
448
+ 378 0.975700
449
+ 379 0.973700
450
+ 380 0.986200
451
+ 381 1.008000
452
+ 382 0.954100
453
+ 383 1.015900
454
+ 384 1.008200
455
+ 385 0.974700
456
+ 386 0.987500
457
+ 387 0.993700
458
+ 388 0.999200
459
+ 389 1.000700
460
+ 390 0.978600
461
+ 391 0.956200
462
+ 392 1.001600
463
+ 393 0.971300
464
+ 394 0.965800
465
+ 395 0.981000
466
+ 396 0.965400
467
+ 397 0.974200
468
+ 398 0.970700
469
+ 399 0.953500
470
+ 400 0.979700
471
+ 401 0.957700
472
+ 402 0.984600
473
+ 403 1.015600
474
+ 404 0.976800
475
+ 405 0.969100
476
+ 406 0.974200
477
+ 407 0.983300
478
+ 408 0.974300
479
+ 409 0.980600
480
+ 410 0.986300
481
+ 411 0.968100
482
+ 412 0.980500
483
+ 413 0.976200
484
+ 414 0.987300
485
+ 415 0.971600
486
+ 416 0.985200
487
+ 417 0.989800
488
+ 418 0.972000
489
+ 419 0.971100
490
+ 420 0.988800
491
+ 421 0.965600
492
+ 422 1.020400
493
+ 423 0.978000
494
+ 424 0.987800
495
+ 425 0.953700
496
+ 426 0.990400
497
+ 427 0.982900
498
+ 428 0.989100
499
+ 429 0.983800
500
+ 430 0.981500
501
+ 431 0.966900
502
+ 432 0.967300
503
+ 433 0.999400
504
+ 434 0.973100
505
+ 435 0.980500
506
+ 436 0.995500
507
+ 437 0.960300
508
+ 438 0.953700
509
+ 439 0.993600
510
+ 440 0.965100
511
+ Dataset({
512
+ features: ['input_ids', 'attention_mask'],
513
+ num_rows: 56355
514
+ })
515
+ {'input_ids': [0, 13866, 338, 263, 1139, 393, 16612, 263, 848, 2009, 29892, 3300, 2859, 411, 385, 1881, 393, 16612, 263, 3758, 1591, 29889, 29871, 14350, 263, 3758, 2346, 393, 5663, 17180, 278, 848, 29889, 13, 2277, 29937, 894, 29901, 24948, 592, 825, 278, 11486, 526, 363, 4275, 8314, 29871, 13, 2277, 29937, 10567, 29901, 6137, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 756, 4341, 4306, 29914, 357, 768, 706, 313, 726, 511, 1626, 29914, 7042, 12384, 313, 726, 511, 5809, 313, 726, 511, 7583, 269, 1188, 273, 313, 726, 511, 7583, 3652, 313, 726, 511, 3664, 267, 313, 726, 467, 259, 13, 2277, 29937, 673, 29901, 5097, 29871, 8695, 3895, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 5754, 9626, 269, 1188, 273, 353, 525, 6156, 2692, 29950, 319, 29965, 10810, 1964, 10764, 29915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}
sqllama-out/adapter_config.json ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "base_model_name_or_path": "decapoda-research/llama-7b-hf",
3
+ "bias": "none",
4
+ "enable_lora": null,
5
+ "fan_in_fan_out": false,
6
+ "inference_mode": true,
7
+ "lora_alpha": 16,
8
+ "lora_dropout": 0.1,
9
+ "merge_weights": false,
10
+ "modules_to_save": null,
11
+ "peft_type": "LORA",
12
+ "r": 4,
13
+ "target_modules": [
14
+ "q_proj",
15
+ "v_proj"
16
+ ],
17
+ "task_type": "CAUSAL_LM"
18
+ }
sqllama-out/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b91522cb7c4937b649e402207dcaf5bc5e454af98e8178db2d821b0b5fe5bdd
3
+ size 8434381