misikoff committed
Commit: 6c39add
1 Parent(s): e5d0972

feat: add config for for sale listings

processors/process_for_sale_listings.ipynb CHANGED
@@ -2,7 +2,7 @@
  "cells": [
  {
  "cell_type": "code",
- "execution_count": 2,
+ "execution_count": 8,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -12,7 +12,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 3,
+ "execution_count": 9,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -25,7 +25,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 7,
+ "execution_count": 10,
  "metadata": {},
  "outputs": [
  {
@@ -332,7 +332,7 @@
  "[2398149 rows x 13 columns]"
  ]
  },
- "execution_count": 7,
+ "execution_count": 10,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -436,7 +436,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 6,
+ "execution_count": 11,
  "metadata": {},
  "outputs": [
  {
@@ -713,7 +713,7 @@
  "[2398149 rows x 13 columns]"
  ]
  },
- "execution_count": 6,
+ "execution_count": 11,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -735,7 +735,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 49,
+ "execution_count": 12,
  "metadata": {},
  "outputs": [],
  "source": [
processors/process_new_constructions.ipynb CHANGED
@@ -2,7 +2,7 @@
  "cells": [
  {
  "cell_type": "code",
- "execution_count": 59,
+ "execution_count": 64,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -12,7 +12,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 60,
+ "execution_count": 65,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -25,7 +25,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 61,
+ "execution_count": 66,
  "metadata": {},
  "outputs": [
  {
@@ -268,7 +268,7 @@
  "[49487 rows x 10 columns]"
  ]
  },
- "execution_count": 61,
+ "execution_count": 66,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -360,7 +360,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 62,
+ "execution_count": 67,
  "metadata": {},
  "outputs": [
  {
@@ -588,7 +588,7 @@
  "[49487 rows x 10 columns]"
  ]
  },
- "execution_count": 62,
+ "execution_count": 67,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -610,7 +610,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 63,
+ "execution_count": 68,
  "metadata": {},
  "outputs": [],
  "source": [
tester.ipynb CHANGED
@@ -22,46 +22,34 @@
  },
  {
  "cell_type": "code",
- "execution_count": 4,
+ "execution_count": 3,
  "metadata": {},
  "outputs": [
  {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "Generating train split: 0 examples [00:00, ? examples/s]\n"
- ]
- },
- {
- "ename": "DatasetGenerationError",
- "evalue": "An error occurred while generating the dataset",
+ "ename": "ValueError",
+ "evalue": "BuilderConfig 'for_sale_listings' not found. Available: ['home_value_forecasts', 'new_constructions']",
  "output_type": "error",
  "traceback": [
  "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
- "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1726\u001b[0m, in \u001b[0;36mGeneratorBasedBuilder._prepare_split_single\u001b[0;34m(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)\u001b[0m\n\u001b[1;32m 1725\u001b[0m _time \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mtime()\n\u001b[0;32m-> 1726\u001b[0m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mkey\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrecord\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mgenerator\u001b[49m\u001b[43m:\u001b[49m\n\u001b[1;32m 1727\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mmax_shard_size\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mand\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mwriter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_num_bytes\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m>\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmax_shard_size\u001b[49m\u001b[43m:\u001b[49m\n",
- "File \u001b[0;32m~/.cache/huggingface/modules/datasets_modules/datasets/misikoff--zillow/2cb3d5d954e44d4753cda7ca8de32141b9ae6f4454c1a69b4534a4fe08d54c79/zillow.py:260\u001b[0m, in \u001b[0;36mNewDataset._generate_examples\u001b[0;34m(self, filepath, split)\u001b[0m\n\u001b[1;32m 250\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39mname \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnew_constructions\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 251\u001b[0m \u001b[38;5;66;03m# Yields examples as (key, example) tuples\u001b[39;00m\n\u001b[1;32m 252\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m key, {\n\u001b[1;32m 253\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion ID\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion ID\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 254\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSize Rank\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSize Rank\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 255\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 256\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion Type\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRegion Type\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 257\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mState\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mState\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 258\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mHome Type\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mHome Type\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 259\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDate\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDate\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[0;32m--> 260\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMedian Sale Price\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[43mdata\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mMedian Sale Price\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m,\n\u001b[1;32m 261\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMedian Sale Price per Sqft\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\n\u001b[1;32m 262\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMedian Sale Price per Sqft\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 263\u001b[0m ],\n\u001b[1;32m 264\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSales Count\u001b[39m\u001b[38;5;124m\"\u001b[39m: data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSales Count\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 265\u001b[0m \u001b[38;5;66;03m# \"answer\": \"\" if split == \"test\" else data[\"answer\"],\u001b[39;00m\n\u001b[1;32m 266\u001b[0m }\n",
- "\u001b[0;31mKeyError\u001b[0m: 'Median Sale Price'",
- "\nThe above exception was the direct cause of the following exception:\n",
- "\u001b[0;31mDatasetGenerationError\u001b[0m Traceback (most recent call last)",
- "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m dataset \u001b[38;5;241m=\u001b[39m \u001b[43mload_dataset\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmisikoff/zillow\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mnew_constructions\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtrust_remote_code\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/load.py:2574\u001b[0m, in \u001b[0;36mload_dataset\u001b[0;34m(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)\u001b[0m\n\u001b[1;32m 2571\u001b[0m try_from_hf_gcs \u001b[38;5;241m=\u001b[39m path \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m _PACKAGED_DATASETS_MODULES\n\u001b[1;32m 2573\u001b[0m \u001b[38;5;66;03m# Download and prepare data\u001b[39;00m\n\u001b[0;32m-> 2574\u001b[0m \u001b[43mbuilder_instance\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdownload_and_prepare\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2575\u001b[0m \u001b[43m \u001b[49m\u001b[43mdownload_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2576\u001b[0m \u001b[43m \u001b[49m\u001b[43mdownload_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2577\u001b[0m \u001b[43m \u001b[49m\u001b[43mverification_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverification_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2578\u001b[0m \u001b[43m \u001b[49m\u001b[43mtry_from_hf_gcs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtry_from_hf_gcs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2579\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_proc\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnum_proc\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2580\u001b[0m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2581\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2583\u001b[0m \u001b[38;5;66;03m# Build dataset for splits\u001b[39;00m\n\u001b[1;32m 2584\u001b[0m keep_in_memory \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 2585\u001b[0m keep_in_memory \u001b[38;5;28;01mif\u001b[39;00m keep_in_memory \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m is_small_dataset(builder_instance\u001b[38;5;241m.\u001b[39minfo\u001b[38;5;241m.\u001b[39mdataset_size)\n\u001b[1;32m 2586\u001b[0m )\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1005\u001b[0m, in \u001b[0;36mDatasetBuilder.download_and_prepare\u001b[0;34m(self, output_dir, download_config, download_mode, verification_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)\u001b[0m\n\u001b[1;32m 1003\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m num_proc \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 1004\u001b[0m prepare_split_kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnum_proc\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m num_proc\n\u001b[0;32m-> 1005\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_download_and_prepare\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1006\u001b[0m \u001b[43m \u001b[49m\u001b[43mdl_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdl_manager\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1007\u001b[0m \u001b[43m \u001b[49m\u001b[43mverification_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverification_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1008\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mprepare_split_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1009\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mdownload_and_prepare_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1010\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1011\u001b[0m \u001b[38;5;66;03m# Sync info\u001b[39;00m\n\u001b[1;32m 1012\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minfo\u001b[38;5;241m.\u001b[39mdataset_size \u001b[38;5;241m=\u001b[39m \u001b[38;5;28msum\u001b[39m(split\u001b[38;5;241m.\u001b[39mnum_bytes \u001b[38;5;28;01mfor\u001b[39;00m split \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minfo\u001b[38;5;241m.\u001b[39msplits\u001b[38;5;241m.\u001b[39mvalues())\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1767\u001b[0m, in \u001b[0;36mGeneratorBasedBuilder._download_and_prepare\u001b[0;34m(self, dl_manager, verification_mode, **prepare_splits_kwargs)\u001b[0m\n\u001b[1;32m 1766\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_download_and_prepare\u001b[39m(\u001b[38;5;28mself\u001b[39m, dl_manager, verification_mode, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mprepare_splits_kwargs):\n\u001b[0;32m-> 1767\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_download_and_prepare\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1768\u001b[0m \u001b[43m \u001b[49m\u001b[43mdl_manager\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1769\u001b[0m \u001b[43m \u001b[49m\u001b[43mverification_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1770\u001b[0m \u001b[43m \u001b[49m\u001b[43mcheck_duplicate_keys\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverification_mode\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mVerificationMode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mBASIC_CHECKS\u001b[49m\n\u001b[1;32m 1771\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mverification_mode\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mVerificationMode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mALL_CHECKS\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1772\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mprepare_splits_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1773\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1100\u001b[0m, in \u001b[0;36mDatasetBuilder._download_and_prepare\u001b[0;34m(self, dl_manager, verification_mode, **prepare_split_kwargs)\u001b[0m\n\u001b[1;32m 1096\u001b[0m split_dict\u001b[38;5;241m.\u001b[39madd(split_generator\u001b[38;5;241m.\u001b[39msplit_info)\n\u001b[1;32m 1098\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1099\u001b[0m \u001b[38;5;66;03m# Prepare split will record examples associated to the split\u001b[39;00m\n\u001b[0;32m-> 1100\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_prepare_split\u001b[49m\u001b[43m(\u001b[49m\u001b[43msplit_generator\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mprepare_split_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1101\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mOSError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 1102\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mOSError\u001b[39;00m(\n\u001b[1;32m 1103\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot find data file. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1104\u001b[0m \u001b[38;5;241m+\u001b[39m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmanual_download_instructions \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 1105\u001b[0m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124mOriginal error:\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1106\u001b[0m \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mstr\u001b[39m(e)\n\u001b[1;32m 1107\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1605\u001b[0m, in \u001b[0;36mGeneratorBasedBuilder._prepare_split\u001b[0;34m(self, split_generator, check_duplicate_keys, file_format, num_proc, max_shard_size)\u001b[0m\n\u001b[1;32m 1603\u001b[0m job_id \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m 1604\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m pbar:\n\u001b[0;32m-> 1605\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mjob_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdone\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcontent\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_prepare_split_single\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1606\u001b[0m \u001b[43m \u001b[49m\u001b[43mgen_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgen_kwargs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mjob_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43m_prepare_split_args\u001b[49m\n\u001b[1;32m 1607\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[1;32m 1608\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdone\u001b[49m\u001b[43m:\u001b[49m\n\u001b[1;32m 1609\u001b[0m \u001b[43m \u001b[49m\u001b[43mresult\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcontent\u001b[49m\n",
- "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:1762\u001b[0m, in \u001b[0;36mGeneratorBasedBuilder._prepare_split_single\u001b[0;34m(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)\u001b[0m\n\u001b[1;32m 1760\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(e, SchemaInferenceError) \u001b[38;5;129;01mand\u001b[39;00m e\u001b[38;5;241m.\u001b[39m__context__ \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 1761\u001b[0m e \u001b[38;5;241m=\u001b[39m e\u001b[38;5;241m.\u001b[39m__context__\n\u001b[0;32m-> 1762\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m DatasetGenerationError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAn error occurred while generating the dataset\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01me\u001b[39;00m\n\u001b[1;32m 1764\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m job_id, \u001b[38;5;28;01mTrue\u001b[39;00m, (total_num_examples, total_num_bytes, writer\u001b[38;5;241m.\u001b[39m_features, num_shards, shard_lengths)\n",
- "\u001b[0;31mDatasetGenerationError\u001b[0m: An error occurred while generating the dataset"
+ "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
+ "Cell \u001b[0;32mIn[3], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m configs \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mhome_value_forecasts\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnew_constructions\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfor_sale_listings\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[0;32m----> 3\u001b[0m dataset \u001b[38;5;241m=\u001b[39m \u001b[43mload_dataset\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmisikoff/zillow\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfor_sale_listings\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtrust_remote_code\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n",
+ "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/load.py:2548\u001b[0m, in \u001b[0;36mload_dataset\u001b[0;34m(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)\u001b[0m\n\u001b[1;32m 2543\u001b[0m verification_mode \u001b[38;5;241m=\u001b[39m VerificationMode(\n\u001b[1;32m 2544\u001b[0m (verification_mode \u001b[38;5;129;01mor\u001b[39;00m VerificationMode\u001b[38;5;241m.\u001b[39mBASIC_CHECKS) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m save_infos \u001b[38;5;28;01melse\u001b[39;00m VerificationMode\u001b[38;5;241m.\u001b[39mALL_CHECKS\n\u001b[1;32m 2545\u001b[0m )\n\u001b[1;32m 2547\u001b[0m \u001b[38;5;66;03m# Create a dataset builder\u001b[39;00m\n\u001b[0;32m-> 2548\u001b[0m builder_instance \u001b[38;5;241m=\u001b[39m \u001b[43mload_dataset_builder\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2549\u001b[0m \u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2550\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2551\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2552\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_files\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_files\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2553\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2554\u001b[0m \u001b[43m \u001b[49m\u001b[43mfeatures\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfeatures\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2555\u001b[0m \u001b[43m \u001b[49m\u001b[43mdownload_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2556\u001b[0m \u001b[43m \u001b[49m\u001b[43mdownload_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2557\u001b[0m \u001b[43m \u001b[49m\u001b[43mrevision\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrevision\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2558\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2559\u001b[0m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2560\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrust_remote_code\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtrust_remote_code\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2561\u001b[0m \u001b[43m \u001b[49m\u001b[43m_require_default_config_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 2562\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mconfig_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2563\u001b[0m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2565\u001b[0m \u001b[38;5;66;03m# Return iterable dataset in case of streaming\u001b[39;00m\n\u001b[1;32m 2566\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m streaming:\n",
+ "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/load.py:2257\u001b[0m, in \u001b[0;36mload_dataset_builder\u001b[0;34m(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, token, use_auth_token, storage_options, trust_remote_code, _require_default_config_name, **config_kwargs)\u001b[0m\n\u001b[1;32m 2255\u001b[0m builder_cls \u001b[38;5;241m=\u001b[39m get_dataset_builder_class(dataset_module, dataset_name\u001b[38;5;241m=\u001b[39mdataset_name)\n\u001b[1;32m 2256\u001b[0m \u001b[38;5;66;03m# Instantiate the dataset builder\u001b[39;00m\n\u001b[0;32m-> 2257\u001b[0m builder_instance: DatasetBuilder \u001b[38;5;241m=\u001b[39m \u001b[43mbuilder_cls\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2258\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2259\u001b[0m \u001b[43m \u001b[49m\u001b[43mdataset_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdataset_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2260\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfig_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2261\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2262\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_files\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_files\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2263\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mhash\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdataset_module\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mhash\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2264\u001b[0m \u001b[43m \u001b[49m\u001b[43minfo\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minfo\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2265\u001b[0m \u001b[43m \u001b[49m\u001b[43mfeatures\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfeatures\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2266\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2267\u001b[0m \u001b[43m \u001b[49m\u001b[43mstorage_options\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstorage_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2268\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mbuilder_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2269\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mconfig_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2270\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2271\u001b[0m builder_instance\u001b[38;5;241m.\u001b[39m_use_legacy_cache_dir_if_possible(dataset_module)\n\u001b[1;32m 2273\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m builder_instance\n",
+ "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:371\u001b[0m, in \u001b[0;36mDatasetBuilder.__init__\u001b[0;34m(self, cache_dir, dataset_name, config_name, hash, base_path, info, features, token, use_auth_token, repo_id, data_files, data_dir, storage_options, writer_batch_size, name, **config_kwargs)\u001b[0m\n\u001b[1;32m 369\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m data_dir \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 370\u001b[0m config_kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdata_dir\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m data_dir\n\u001b[0;32m--> 371\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig_id \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_create_builder_config\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 372\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfig_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 373\u001b[0m \u001b[43m \u001b[49m\u001b[43mcustom_features\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfeatures\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 374\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mconfig_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 375\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 377\u001b[0m \u001b[38;5;66;03m# prepare info: DatasetInfo are a standardized dataclass across all datasets\u001b[39;00m\n\u001b[1;32m 378\u001b[0m \u001b[38;5;66;03m# Prefill datasetinfo\u001b[39;00m\n\u001b[1;32m 379\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m info \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 380\u001b[0m \u001b[38;5;66;03m# TODO FOR PACKAGED MODULES IT IMPORTS DATA FROM src/packaged_modules which doesn't make sense\u001b[39;00m\n",
+ "File \u001b[0;32m~/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/datasets/builder.py:592\u001b[0m, in \u001b[0;36mDatasetBuilder._create_builder_config\u001b[0;34m(self, config_name, custom_features, **config_kwargs)\u001b[0m\n\u001b[1;32m 590\u001b[0m builder_config \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbuilder_configs\u001b[38;5;241m.\u001b[39mget(config_name)\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m builder_config \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mBUILDER_CONFIGS:\n\u001b[0;32m--> 592\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 593\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mBuilderConfig \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mconfig_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m not found. Available: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlist\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbuilder_configs\u001b[38;5;241m.\u001b[39mkeys())\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 594\u001b[0m )\n\u001b[1;32m 596\u001b[0m \u001b[38;5;66;03m# if not using an existing config, then create a new config on the fly\u001b[39;00m\n\u001b[1;32m 597\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m builder_config:\n",
+ "\u001b[0;31mValueError\u001b[0m: BuilderConfig 'for_sale_listings' not found. Available: ['home_value_forecasts', 'new_constructions']"
  ]
  }
  ],
  "source": [
- "dataset = load_dataset(\"misikoff/zillow\", \"new_constructions\", trust_remote_code=True)"
+ "configs = [\"home_value_forecasts\", \"new_constructions\", \"for_sale_listings\"]\n",
+ "\n",
+ "dataset = load_dataset(\"misikoff/zillow\", \"for_sale_listings\", trust_remote_code=True)"
  ]
  },
  {
  "cell_type": "code",
- "execution_count": 5,
+ "execution_count": 27,
  "metadata": {},
  "outputs": [
  {
@@ -79,7 +67,7 @@
  " 'Count': 33940}"
  ]
  },
- "execution_count": 5,
+ "execution_count": 27,
  "metadata": {},
  "output_type": "execute_result"
  }
@@ -90,7 +78,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 6,
+ "execution_count": 28,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -99,7 +87,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 24,
+ "execution_count": 37,
  "metadata": {},
  "outputs": [
  {
@@ -111,13 +99,13 @@
  " 'Region Type': 'country',\n",
  " 'State': None,\n",
  " 'Home Type': 'condo/co-op only',\n",
- " 'Date': '2018-06-30',\n",
- " 'Sale Price': 389394.5,\n",
- " 'Sale Price per Sqft': 229.8614501953125,\n",
- " 'Count': 4330}"
+ " 'Date': '2018-03-31',\n",
+ " 'Sale Price': 386700.0,\n",
+ " 'Sale Price per Sqft': 238.31776428222656,\n",
+ " 'Count': 4267}"
  ]
  },
- "execution_count": 24,
+ "execution_count": 37,
  "metadata": {},
  "output_type": "execute_result"
  }
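The refreshed tester output above fails because the zillow.py that load_dataset resolved (cached or Hub-side) did not yet list the 'for_sale_listings' config. A hedged pre-flight check one might run before loading (assumes a recent datasets release; trust_remote_code may be optional or unsupported on older versions):

# Hedged sketch: confirm which configs the served zillow.py currently exposes
# before calling load_dataset, so a missing config fails fast with a clear message.
from datasets import get_dataset_config_names, load_dataset

available = get_dataset_config_names("misikoff/zillow", trust_remote_code=True)
print(available)  # should include "for_sale_listings" once this commit is deployed

if "for_sale_listings" in available:
    dataset = load_dataset("misikoff/zillow", "for_sale_listings", trust_remote_code=True)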
zillow.py CHANGED
@@ -81,6 +81,11 @@ class NewDataset(datasets.GeneratorBasedBuilder):
  version=VERSION,
  description="This part of my dataset covers a second domain",
  ),
+ datasets.BuilderConfig(
+ name="for_sale_listings",
+ version=VERSION,
+ description="This part of my dataset covers a second domain",
+ ),
  ]

  DEFAULT_CONFIG_NAME = "home_value_forecasts" # It's not mandatory to have a default configuration. Just use one if it make sense.
@@ -140,6 +145,33 @@ class NewDataset(datasets.GeneratorBasedBuilder):
  # These are the features of your dataset like images, labels ...
  }
  )
+ elif self.config.name == "for_sale_listings":
+ features = datasets.Features(
+ {
+ "Region ID": datasets.Value(dtype="string", id="Region ID"),
+ "Size Rank": datasets.Value(dtype="int32", id="Size Rank"),
+ "Region": datasets.Value(dtype="string", id="Region"),
+ "Region Type": datasets.Value(dtype="string", id="Region Type"),
+ "State": datasets.Value(dtype="string", id="State"),
+ "Home Type": datasets.Value(dtype="string", id="Home Type"),
+ "Date": datasets.Value(dtype="string", id="Date"),
+ "Median Listing Price": datasets.Value(
+ dtype="float32", id="Median Listing Price"
+ ),
+ "Median Listing Price (Smoothed)": datasets.Value(
+ dtype="float32", id="Median Listing Price (Smoothed)"
+ ),
+ "New Listings": datasets.Value(dtype="int32", id="New Listings"),
+ "New Listings (Smoothed)": datasets.Value(
+ dtype="int32", id="New Listings (Smoothed)"
+ ),
+ "New Pending (Smoothed)": datasets.Value(
+ dtype="int32", id="New Pending (Smoothed)"
+ ),
+ "New Pending": datasets.Value(dtype="int32", id="New Pending"),
+ # These are the features of your dataset like images, labels ...
+ }
+ )
  # else: # This is an example to show how to have different features for "home_value_forecasts" and "second_domain"
  # features = datasets.Features(
  # {
@@ -260,6 +292,26 @@ class NewDataset(datasets.GeneratorBasedBuilder):
  "Count": data["Count"],
  # "answer": "" if split == "test" else data["answer"],
  }
+ elif self.config.name == "for_sale_listings":
+ # Yields examples as (key, example) tuples
+ yield key, {
+ "Region ID": data["Region ID"],
+ "Size Rank": data["Size Rank"],
+ "Region": data["Region"],
+ "Region Type": data["Region Type"],
+ "State": data["State"],
+ "Home Type": data["Home Type"],
+ "Date": data["Date"],
+ "Median Listing Price": data["Median Listing Price"],
+ "Median Listing Price (Smoothed)": data[
+ "Median Listing Price (Smoothed)"
+ ],
+ "New Listings": data["New Listings"],
+ "New Listings (Smoothed)": data["New Listings (Smoothed)"],
+ "New Pending (Smoothed)": data["New Pending (Smoothed)"],
+ "New Pending": data["New Pending"],
+ # "answer": "" if split == "test" else data["answer"],
+ }
  # else:
  # yield key, {
  # "sentence": data["sentence"],
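For reference, a minimal usage sketch for the config added above once this zillow.py is live; the "train" split name follows the earlier tester output ("Generating train split") but should be treated as an assumption:

# Minimal sketch: load the new "for_sale_listings" config and inspect one record.
# Assumes the updated zillow.py above is what the Hub serves and that it builds a "train" split.
from datasets import load_dataset

dataset = load_dataset("misikoff/zillow", "for_sale_listings", trust_remote_code=True)

row = dataset["train"][0]
print(row["Region"], row["Date"], row["Median Listing Price"], row["New Listings"])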