#!/usr/bin/env python3
"""
T-ECD Dataset Downloader
Supports selective downloading by domains and date ranges.
"""

import argparse
import concurrent.futures
import os
import sys
from dataclasses import dataclass
from getpass import getpass
from pathlib import Path
from typing import List, Optional, Tuple

from huggingface_hub import snapshot_download
from tqdm import tqdm


@dataclass
class DownloadConfig:
    """Configuration for dataset download parameters."""

    token: str
    dataset_path: str = "dataset/full"
    local_dir: str = "t_ecd_full"
    domains: Tuple[str, ...] = (
        "retail",
        "marketplace",
        "offers",
        "reviews",
        "payments",
    )
    day_begin: int = 0
    day_end: int = 1308  # inclusive
    max_workers: int = 20


class DatasetDownloader:
    """Handles downloading of T-ECD dataset."""

    STATIC_FILES = ["users.pq", "brands.pq"]
    DOMAIN_ITEMS = ["retail", "marketplace", "offers"]
    ALL_DOMAINS = ["retail", "marketplace", "offers", "reviews", "payments"]

    def __init__(self, config: DownloadConfig):
        self.config = config
        self._ensure_local_dir()

    def _ensure_local_dir(self) -> None:
        """Create local directory if it doesn't exist."""
        Path(self.config.local_dir).mkdir(parents=True, exist_ok=True)

    def _generate_file_patterns(self) -> List[str]:
        """Generate all file patterns to download based on configuration."""
        patterns = []

        # Add static files
        patterns.extend(
            f"{self.config.dataset_path}/{file}" for file in self.STATIC_FILES
        )

        # Add domain-specific item files
        patterns.extend(
            f"{self.config.dataset_path}/{domain}/items.pq"
            for domain in self.config.domains
            if domain in self.DOMAIN_ITEMS
        )

        # Add daily files for each domain
        for domain in self.config.domains:
            for day in range(self.config.day_begin, self.config.day_end + 1):
                day_str = str(day).zfill(5)
                patterns.extend(self._get_domain_day_patterns(domain, day_str))

        return patterns

    def _get_domain_day_patterns(self, domain: str, day_str: str) -> List[str]:
        """Get file patterns for a specific domain and day."""
        base_path = f"{self.config.dataset_path}/{domain}"

        if domain in ["retail", "marketplace", "offers"]:
            return [f"{base_path}/events/{day_str}.pq"]
        elif domain == "payments":
            return [
                f"{base_path}/events/{day_str}.pq",
                f"{base_path}/receipts/{day_str}.pq",
            ]
        elif domain == "reviews":
            return [f"{base_path}/{day_str}.pq"]

        return []

    def _download_single_file(self, pattern: str) -> Tuple[str, bool]:
        """Download a single file."""
        try:
            snapshot_download(
                repo_id="t-tech/T-ECD",
                repo_type="dataset",
                allow_patterns=pattern,
                local_dir=self.config.local_dir,
                token=self.config.token,
            )
            return pattern, True
        except Exception as e:
            print(f"Error downloading {pattern}: {e}")
            return pattern, False

    def download(self) -> List[str]:
        """Download all specified files in parallel.

        Returns:
            List of failed download patterns
        """
        patterns = self._generate_file_patterns()
        print(f"Downloading {len(patterns)} files to {self.config.local_dir}")

        with concurrent.futures.ThreadPoolExecutor(
            max_workers=self.config.max_workers
        ) as executor:
            # Submit all download tasks
            future_to_pattern = {
                executor.submit(self._download_single_file, pattern): pattern
                for pattern in patterns
            }

            # Process results with progress bar
            results = []
            for future in tqdm(
                concurrent.futures.as_completed(future_to_pattern),
                total=len(patterns),
                desc="Downloading files",
            ):
                results.append(future.result())

        # Report results
        successful = sum(1 for _, status in results if status)
        failed = [pattern for pattern, status in results if not status]

        print(f"Download completed: {successful}/{len(patterns)} files successful")

        if failed:
            print("Failed downloads:")
            for pattern in sorted(failed):
                print(f"  - {pattern}")

        return failed


def create_config_from_args(args) -> DownloadConfig:
    """Create DownloadConfig from command line arguments."""
    token = args.token or os.getenv("HF_TOKEN")
    if not token:
        token = getpass("Enter your Hugging Face token: ")

    domains = args.domains if args.domains else DatasetDownloader.ALL_DOMAINS

    return DownloadConfig(
        token=token,
        dataset_path=args.dataset_path,
        local_dir=args.local_dir,
        domains=tuple(domains),
        day_begin=args.day_begin,
        day_end=args.day_end,
        max_workers=args.max_workers,
    )


def download_dataset(
    token: str,
    local_dir: str = "t_ecd_full",
    dataset_path: str = "dataset/full",
    domains: Optional[List[str]] = None,
    day_begin: int = 1307,
    day_end: int = 1308,
    max_workers: int = 20,
) -> List[str]:
    """High-level function to download T-ECD dataset.

    Args:
        token: Hugging Face authentication token
        local_dir: Local directory to save dataset
        dataset_path: Path within the dataset repository
        domains: List of domains to download
        day_begin: Start day (inclusive)
        day_end: End day (inclusive)
        max_workers: Number of parallel download workers

    Returns:
        List of failed download patterns
    """
    if domains is None:
        domains = DatasetDownloader.ALL_DOMAINS

    config = DownloadConfig(
        token=token,
        local_dir=local_dir,
        dataset_path=dataset_path,
        domains=tuple(domains),
        day_begin=day_begin,
        day_end=day_end,
        max_workers=max_workers,
    )

    downloader = DatasetDownloader(config)
    return downloader.download()
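
# Example programmatic use (a sketch, not part of the module API; it assumes an
# HF_TOKEN environment variable and a retail-only two-day slice):
#
#   failed = download_dataset(
#       token=os.environ["HF_TOKEN"],
#       local_dir="t_ecd_retail",
#       domains=["retail"],
#       day_begin=1307,
#       day_end=1308,
#   )
#   if failed:
#       print(f"{len(failed)} files failed; re-run to retry them")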


def main():
    """Main execution function."""
    parser = argparse.ArgumentParser(
        description="Download T-ECD dataset from Hugging Face Hub"
    )
    parser.add_argument(
        "--token", "-t", help="Hugging Face token (or set HF_TOKEN env var)"
    )
    parser.add_argument(
        "--local-dir",
        "-d",
        default="t_ecd_full",
        help="Local directory to save dataset",
    )
    parser.add_argument(
        "--dataset-path",
        "-p",
        default="dataset/full",
        help="Path within the dataset repository",
    )
    parser.add_argument(
        "--domains",
        "-m",
        nargs="+",
        choices=DatasetDownloader.ALL_DOMAINS,
        help="Domains to download (default: all)",
    )
    parser.add_argument(
        "--day-begin", "-b", type=int, default=1307, help="Start day (inclusive)"
    )
    parser.add_argument(
        "--day-end", "-e", type=int, default=1308, help="End day (inclusive)"
    )
    parser.add_argument(
        "--max-workers",
        "-w",
        type=int,
        default=20,
        help="Number of parallel download workers",
    )

    args = parser.parse_args()

    try:
        config = create_config_from_args(args)
        downloader = DatasetDownloader(config)
        failed_downloads = downloader.download()

        exit_code = 1 if failed_downloads else 0
        sys.exit(exit_code)

    except KeyboardInterrupt:
        print("\nDownload cancelled by user")
        sys.exit(1)
    except Exception as e:
        print(f"Fatal error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()