# encoding: utf-8

import os
import json
import requests
from threading import Lock
from concurrent.futures import ThreadPoolExecutor, as_completed
from tqdm import tqdm

# Load one JSON record per line from the scraped-data dump.
with open("./zkh_datas.txt", "r", encoding="utf-8") as f:
    lines = f.readlines()

# NOTE(review): this truncates the run to the first 5 records — looks like a
# leftover debug limiter; confirm and remove to process the whole file.
lines = lines[:5]

# Parse each non-blank line into a dict (see Spider.download for the expected shape).
lines = [json.loads(line) for line in lines if line.strip()]
# Browser-like User-Agent so the image host does not reject the requests.
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0"}
# Lock kept for the (currently commented-out) labels.txt writer in Spider.download_single.
lock = Lock()


class Spider(object):
    """Thread-pooled image downloader for the records loaded in ``lines``.

    For each record it fetches the "origin" and "label" style variants of
    every main/detail image into ./imgs/origin and ./imgs/label.
    """

    def __init__(self, thread_num=10):
        # One download() task per record is submitted to this pool.
        self.executor = ThreadPoolExecutor(max_workers=thread_num)
        # makedirs(..., exist_ok=True) replaces the racy exists()/mkdir()
        # pairs and creates ./imgs implicitly along with the subdirectories.
        os.makedirs("./imgs/origin", exist_ok=True)
        os.makedirs("./imgs/label", exist_ok=True)
        self.done = 0          # completed-task counter (mutated only in start())
        self.all = len(lines)  # total number of records to process

    @staticmethod
    def request(url, timeout=30):
        """GET *url* with the shared headers; return the Response or None.

        Fix: the original call had no ``timeout``, so a stalled server could
        hang a worker thread forever. Any network error still yields None so
        callers keep their best-effort behaviour.
        """
        try:
            return requests.get(url, headers=headers, timeout=timeout)
        except Exception:
            return None

    def start(self):
        """Submit one download task per record and print progress as tasks finish."""
        all_tasks = [self.executor.submit(self.download, data) for data in lines]
        for _ in as_completed(all_tasks):
            # Only this (main) thread mutates self.done, so no lock is needed.
            self.done += 1
            print(f"done={self.done}/{self.all}")
        # Fix: release the worker threads once everything has completed.
        self.executor.shutdown(wait=True)

    def download(self, data: dict):
        """Download every main/detail image of one record.

        Expected record shape (from zkh_datas.txt), e.g.:
        {"detailImages": [...urls...], "mainImages": [...urls...], "skuId": "YK2425"}
        Images are numbered sequentially across main images then detail images,
        matching the original ordering.
        """
        sku_id = data.get("skuId", "notfound")
        images = data.get("mainImages", []) + data.get("detailImages", [])
        for idx, img_url in enumerate(images):
            self.download_single(sku_id, idx, img_url)

    def download_single(self, sku_id: str, idx: int, url: str):
        """Fetch both style variants of one image, skipping files that already exist."""
        base_url = url.split("?")[0]  # strip any pre-existing query string
        variants = (
            ("origin", base_url + "?x-oss-process=style/common_style"),
            ("label", base_url + "?x-oss-process=style/common_stylenew"),
        )
        for kind, variant_url in variants:
            path = f"./imgs/{kind}/{sku_id}_{idx}.jpg"
            if os.path.exists(path):
                continue  # already downloaded on a previous run
            response = self.request(variant_url)
            # Fix: persist only successful responses explicitly; the original
            # relied on Response.__bool__ (which equals Response.ok) implicitly.
            if response is not None and response.ok:
                with open(path, "wb") as f:
                    f.write(response.content)


if __name__ == '__main__':
    # Script entry point: build a spider with the default pool size and
    # process every record loaded at module import time.
    Spider().start()
