import time

import requests
from bs4 import BeautifulSoup
import os
from multiprocessing import Pool,Process

# Minimal request headers: a browser-like User-Agent is all this site needs
# to serve the normal HTML (the other browser headers are unnecessary).
headers = {
    'User-Agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36',
}
# Output directory anchored to this script's location rather than the current
# working directory, so it always matches the directory that init_meizi_dir()
# creates (the old '.\\meizi\\' broke when run from another CWD, and was
# Windows-only). The trailing '' keeps the path-separator suffix that
# save_img() relies on when concatenating file names.
BASE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'meizi', '')


def get_html(url, timeout=30):
    """Fetch *url* with the scraper's browser-like headers.

    Args:
        url: absolute URL to fetch.
        timeout: seconds before the request is aborted. Added because
            ``requests.get`` without a timeout can block forever on a
            stalled connection, hanging the whole worker.

    Returns:
        The raw ``requests.Response``; callers read ``.text`` for HTML
        pages and ``.content`` for binary image data.
    """
    return requests.get(url, headers=headers, timeout=timeout)


def get_page_urls(url):
    """Scrape the index page at *url* and return the gallery page links.

    Extracts the ``href`` of every anchor inside the ``#picture`` element.
    """
    soup = BeautifulSoup(get_html(url).text, 'lxml')
    return [anchor['href'] for anchor in soup.select('#picture a')]


def get_img_urls(list_page_urls, po):
    """Collect image URLs from each gallery page and queue downloads.

    For every page in *list_page_urls*, pulls the ``src`` of each image
    inside ``#picture`` and hands the batch to the pool *po*, which runs
    ``save_img`` asynchronously.
    """
    for page_url in list_page_urls:
        parsed = BeautifulSoup(get_html(page_url).text, 'lxml')
        srcs = [tag['src'] for tag in parsed.select('#picture img')]
        po.apply_async(save_img, args=(srcs,))


# def save_image(list_img):
#     for info in list_img:
#         img = get_html(info).content
#         file_name = info.split('uploads/')[-1].replace('/','_')
#         with open(BASE_PATH+file_name,'wb') as f:
#             f.write(img)


def init_meizi_dir():
    """Ensure the 'meizi' output directory exists next to this script.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the race-prone
    exists()-then-mkdir pair: with multiple processes starting at once,
    the old check could pass in both and one ``mkdir`` would raise.
    """
    script_dir = os.path.dirname(os.path.abspath(__file__))
    os.makedirs(os.path.join(script_dir, 'meizi'), exist_ok=True)


def save_img(img_urls):
    """Download every URL in *img_urls* and write it under BASE_PATH.

    The on-disk name is derived from the URL path after 'uploads/',
    with '/' flattened to '_' so it is a single file name.
    """
    for img_url in img_urls:
        file_name = img_url.split('uploads/')[-1].replace('/', '_')
        data = get_html(img_url).content
        with open(BASE_PATH + file_name, 'wb') as out:
            out.write(data)


def main():
    """Entry point: prepare the output dir, scrape the index, fan out
    downloads across a 3-worker process pool, and print the elapsed time."""
    init_meizi_dir()

    pool = Pool(3)
    started = time.time()

    index_url = 'http://www.meizitu.com/'
    page_links = get_page_urls(index_url)
    get_img_urls(page_links, pool)

    # No more tasks will be submitted; wait for the workers to drain.
    pool.close()
    pool.join()
    print("%s" % (time.time() - started))


if __name__ == '__main__':
    main()
