import os
import os.path
import time
import requests
from bs4 import BeautifulSoup 

# Browser-like User-Agent header: the site may refuse or alter responses for
# clients that identify as a script, so every request sends this header.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36"
}

def enter(base_dir=r"D:\code\python\crawl\one"):
    """Change the working directory into ``base_dir/pics``, creating ``pics``.

    Generalized: the working root used to be hard-coded; it is now a
    parameter whose default preserves the original behavior.

    Args:
        base_dir: Directory under which the ``pics`` folder lives/is created.

    Raises:
        OSError: If ``base_dir`` does not exist or is not accessible.
    """
    os.chdir(base_dir)
    # exist_ok avoids the racy "check then mkdir" pattern of the original.
    os.makedirs("pics", exist_ok=True)
    os.chdir("pics")


def down(page, session):
    """Download the picture of ONE issue ``page`` into the current directory.

    Fetches ``http://m.wufazhuce.com/one/<page>``, extracts the
    ``img.item-picture-img`` source URL, and streams it to ``<page>.jpg``.
    Errors are printed and the function returns without raising, so a single
    bad page does not abort the whole crawl.

    Args:
        page: Issue number used both in the URL and the output filename.
        session: A ``requests.Session`` used for both HTTP requests.
    """
    url = f"http://m.wufazhuce.com/one/{page}"
    pic_name = f"{page}.jpg"
    try:
        # timeout prevents one hung connection from stalling the whole run
        with session.get(url, headers=headers, timeout=10) as res:
            soup = BeautifulSoup(res.content, "lxml")
            img = soup.find("img", "item-picture-img")
            if img is None:
                # page layout changed or issue missing — skip cleanly
                print(f"no picture found on page {page}")
                return
            pic_url = img.get("src")
    except Exception as e:
        print(e)
        # Bug fix: the original fell through here with `pic_url` unbound,
        # producing a confusing NameError in the second try block.
        return

    try:
        with session.get(pic_url, headers=headers, stream=True, timeout=10) as pic_res:
            with open(pic_name, "wb") as f:
                for item in pic_res.iter_content(512):
                    f.write(item)
                print(f"{pic_name} Download OK (=^-ω-^=)")
    except Exception as e:
        print(e)
    

def main():
    """Crawl the newest ``total`` ONE issues, newest first, and time the run."""
    max_page = 3060
    total = 100

    enter()
    start = time.time()

    # One shared Session reuses the TCP connection across all requests.
    with requests.Session() as session:
        page = max_page
        while page > max_page - total:
            down(page, session)
            page -= 1

    print(f"Spend {time.time()-start:.1f}S")


# Run the crawl only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
