from multiprocessing import Queue
from multiprocessing import Process
from multiprocessing import Pool
from multiprocessing.queues import Empty
from os import get_terminal_size
from pathlib import Path
from sys import argv
from sys import path

# Make the package in the current working directory importable
path.insert(0, str(Path().absolute()))

from requests import Session

from zom_spider_lib.tools import ensure_filename
from zom_spider_lib.tools import SimpleHttpDownloader
from zom_spider_lib.site.exhentai import ExhentaiGallarySpider


class ExhentaiImageDownloader(SimpleHttpDownloader):
    """Downloader that threads a page number through the download cycle.

    Each queued job is a ``(pagenum, http_url)`` pair.  The page number is
    stashed on the instance before the fetch and re-attached to the raw
    bytes afterwards, so the saver knows which page the data belongs to.
    """

    def before_download(self, url):
        # Split the job tuple, remembering which page is being fetched.
        self.current_pagenum, http_url = url
        return http_url

    def after_download(self, data):
        # Re-pair the downloaded bytes with the page number noted above.
        return (self.current_pagenum, data)


class ImageSaver(Process):
    """Consumer process that writes downloaded pages to disk as PNG files.

    Reads ``(pagenum, bytes)`` pairs from ``datum`` and writes each one to
    ``save_path/NNN.png``, printing a green progress line per page.  The
    process exits once the queue has stayed empty for five minutes.
    """

    save_path: Path  # directory the images are written into
    datum: Queue     # queue of (pagenum, data) pairs fed by the downloaders
    total: int       # total page count, used only in the progress message

    def __init__(self, datum: Queue, save_path: str, total: int = 0):
        super().__init__()
        self.datum = datum
        self.save_path = Path(save_path)
        self.total = total
        # exist_ok avoids the check-then-create race of the previous
        # `if not exists(): mkdir()`; parents also covers a missing
        # intermediate directory.
        self.save_path.mkdir(parents=True, exist_ok=True)

    def run(self):
        while True:
            try:
                # Queue.get signals a timeout by raising queue.Empty —
                # never TimeoutError — so Empty is the only exit condition
                # (the old `except TimeoutError` branch was dead code).
                pagenum, data = self.datum.get(timeout=300)
            except Empty:
                break
            (self.save_path / f"{pagenum:03d}.png").write_bytes(data)
            print(f"\x1b[32mpage {pagenum}/{self.total} completed!\x1b[0m")


def main():
    """Entry point: scrape gallery metadata for argv[1], then download pages.

    Expects the gallery URL as the first CLI argument and an authenticated
    Exhentai cookie string in ./cookie.txt.
    """
    # Fail with a usage hint instead of a bare IndexError traceback.
    if len(argv) < 2:
        raise SystemExit(f"usage: {argv[0]} <gallery-url>")
    url = argv[1]
    # The cookie string is read verbatim from cookie.txt in the
    # working directory.
    cookie = Path("cookie.txt").read_text()
    egs = ExhentaiGallarySpider("HelloExhentai")
    egs.config(cookie)

    result = egs.run(url)

    # Separator spanning the full terminal width, then the gallery titles.
    print("=" * get_terminal_size()[0])
    print(f"title: {result['title']}")
    print(f"title2: {result['title2']}")
    print()
    download_images(result["pages"], title=result["title"],
                    workers=4, http_session=egs.session)


def download_images(pages: dict, title: str, workers=4, http_session=None):
    """Fan image downloads out across worker processes and save the results.

    Args:
        pages: mapping of page number -> image URL.
        title: gallery title, sanitized into the output directory name.
        workers: number of downloader processes to spawn.
        http_session: requests Session to reuse; a fresh one is created
            when omitted.
    """
    # A default of `http_session=Session()` would be evaluated once at
    # import time and shared across every call (mutable-default pitfall);
    # build the session lazily instead.
    if http_session is None:
        http_session = Session()

    urls = Queue()
    resps = Queue()
    total = len(pages)
    for job in pages.items():
        urls.put(job)

    dirname = ensure_filename(title)

    im_saver = ImageSaver(resps, dirname, total)
    im_downloaders = [ExhentaiImageDownloader(urls, resps)
                      for _ in range(workers)]
    for downloader in im_downloaders:
        downloader.config(session=http_session)
        downloader.start()
    im_saver.start()
    # The saver exits after its queue has idled; only then reap the workers.
    im_saver.join()
    for downloader in im_downloaders:
        downloader.join()


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
