Datasets: the following script downloads each image listed in a CSV manifest into a local output directory.
import argparse
import csv
import time
from pathlib import Path
from urllib.request import urlretrieve

# Pause between downloads so the image host is not hammered.
wait_time = 1.0

if __name__ == "__main__":
    parser = argparse.ArgumentParser("Image Downloader")
    parser.add_argument("--csv", type=str, required=True, help="Path to CSV file of images to download")
    parser.add_argument("--output", type=str, required=True, help="Path to output directory")
    args = parser.parse_args()

    output_dir = Path(args.output)
    output_dir.mkdir(parents=True, exist_ok=True)

    with open(args.csv, "r", newline="") as f:
        reader = csv.reader(f)
        for row in reader:
            # Each row is expected to hold the image ID in column 0
            # and the download URL in column 3.
            id_ = row[0]
            url = row[3]
            try:
                urlretrieve(url, output_dir / id_)
            except Exception:
                # Log the failure and keep going with the next image.
                print(f"Failed to download {url}")
            time.sleep(wait_time)
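
To run it, save the script under any name (download_images.py and images.csv below are illustrative, not from the original) and point it at the CSV manifest and an output directory. The script assumes the image ID sits in the first CSV column and the URL in the fourth, waits one second between downloads (wait_time), and logs failed downloads instead of aborting.

    python download_images.py --csv images.csv --output images/   # filenames here are illustrative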