# -*- coding: utf-8 -*-
import os
import os.path
import time
import requests
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor, wait

# Number of pages that failed to download.  Written by worker threads in
# down() and read by main() for the final "pics:" summary.
count=0

def enter():
    """Change the working directory into the download folder, creating it if needed.

    Side effect: the process CWD ends up inside ``...\\one\\pics`` so that
    down() can write image files with bare names.
    """
    # Raw string: the original "D:\code\python\crawl\one" relied on
    # \c, \p, \o not being recognized escapes — a SyntaxWarning on 3.12+.
    os.chdir(r"D:\code\python\crawl\one")
    dir_name = "pics"
    # makedirs(exist_ok=True) replaces the racy exists()-then-mkdir pair.
    os.makedirs(dir_name, exist_ok=True)
    os.chdir(dir_name)


def down(page, session):
    """Download the cover image of ONE issue *page* into the current directory.

    Args:
        page: issue number, used both in the URL and as the output filename.
        session: a shared requests.Session for connection pooling.

    On any failure the global ``count`` is incremented (read by main() for
    the final summary) and the error is printed; nothing is raised.
    """
    global count
    url = f"http://m.wufazhuce.com/one/{page}"
    pic_name = f"{page}.jpg"
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36"
    }
    try:
        with session.get(url, headers=headers) as res:
            res.raise_for_status()
            soup = BeautifulSoup(res.content, "lxml")
            img = soup.find("img", "item-picture-img")
            if img is None:
                # Page layout changed or picture missing — treat as a failure
                # instead of letting an AttributeError escape into the thread.
                raise ValueError(f"no item-picture-img found on {url}")
            pic_url = img.get("src")
    # RequestException also covers ConnectionError/Timeout, which the
    # original HTTPError-only clause let escape into the worker thread.
    except (requests.exceptions.RequestException, ValueError) as e:
        print(e)
        count += 1
        # Bug fix: without this return, execution fell through and the
        # second request used an undefined pic_url (NameError).
        return

    try:
        with session.get(pic_url, headers=headers, stream=True) as pic_res:
            pic_res.raise_for_status()
            with open(pic_name, "wb") as f:
                # Stream in small chunks so large images are never held
                # entirely in memory.
                for chunk in pic_res.iter_content(512):
                    f.write(chunk)
    except Exception as e:
        print(e)
        # Bug fix: a failed image fetch is a failed page — count it, or
        # main()'s "pics:" total over-reports.
        count += 1


def main():
    """Download the most recent `total` issues concurrently and print stats."""
    max_page = 3035   # newest issue number at time of writing
    total = 100       # how many issues (counting backwards) to fetch
    enter()
    start = time.time()

    with requests.Session() as session:
        # Bug fix: wait for the downloads while the session is still open.
        # The original called wait() after the `with` block had already
        # closed the session, racing with worker threads still using it.
        with ThreadPoolExecutor(max_workers=5) as executor:
            tasks = [
                executor.submit(down, page, session)
                for page in range(max_page, max_page - total, -1)
            ]
            wait(tasks)

    print(f"Spend {time.time()-start:.1f}S")
    # `count` is the number of pages that failed inside down().
    print(f"pics: {total-count}")


# Run the crawl only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
