import json
import os
import re
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from queue import Empty, Queue

import requests
import urllib3
from bs4 import BeautifulSoup
from retrying import retry

# 1. Fetch the image download links and put them into a queue
# 2. Pull links from the queue and save the images to the local disk

# urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

def search_getImgUrl(word, size):
    """Search pixivic for `word`, enqueue up to `size` image URLs.

    For each illustration the first (original-resolution) image URL is
    rewritten to the cheerfun.dev mirror and pushed onto the global
    `url_queue`; its file name (last path segment) is pushed onto the
    global `picName_queue` in the same order.
    """
    headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36'}
    search_url = 'https://api.pixivic.com/illustrations'
    # Pass the keyword via `params` so requests URL-encodes it properly
    # (the old str.format broke on spaces and non-ASCII keywords).
    params = {'keyword': word, 'page': 1, 'pageSize': size}
    res = requests.get(url=search_url, headers=headers, params=params, timeout=30)
    res_data = res.json()  # same as json.loads(res.text), but simpler
    for illust in res_data['data']:
        # Only the first image of each illustration is downloaded.
        img_original = illust['imageUrls'][0]['original']
        # i.pximg.net blocks hotlinking; swap in the proxy host.
        img_url = img_original.replace('https://i.pximg.net/', 'https://original.img.cheerfun.dev/')
        url_queue.put(img_url)
        picName_queue.put(img_url.split('/')[-1])


def rank_getImgUrl(date, mode, size):
    """Fetch the ranking for `date`/`mode`, enqueue up to `size` image URLs.

    `mode` is the API's ranking window ('day', 'week' or 'month').
    Like search_getImgUrl, rewritten original-image URLs go onto the
    global `url_queue` and their file names onto `picName_queue`.
    """
    headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36'}
    rank_url = 'https://api.pixivic.com/ranks'
    # Let requests build and encode the query string; add a timeout so a
    # dead API host cannot hang the program forever.
    params = {'page': 1, 'date': date, 'mode': mode, 'pageSize': size}
    res = requests.get(url=rank_url, headers=headers, params=params, timeout=30)
    res_data = res.json()  # same as json.loads(res.text), but simpler
    for illust in res_data['data']:
        # Only the first image of each illustration is downloaded.
        img_original = illust['imageUrls'][0]['original']
        # i.pximg.net blocks hotlinking; swap in the proxy host.
        img_url = img_original.replace('https://i.pximg.net/', 'https://original.img.cheerfun.dev/')
        url_queue.put(img_url)
        picName_queue.put(img_url.split('/')[-1])

# Bounded retry: the bare `@retry` retried forever with no delay, so one
# permanently-failing download could spin this worker indefinitely.
@retry(stop_max_attempt_number=3, wait_fixed=2000)
def saveImg(name):
    """Worker: drain the global queues, saving each image under ./<name>/.

    Runs in several threads at once (see main); exits when the URL queue
    is empty.
    """
    headers = {
        'accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
        'accept-encoding': 'gzip, deflate, br',
        'accept-languag': 'zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7',
        'sec-fetch-dest': 'image',
        'sec-fetch-mode': 'no-cors',
        'sec-fetch-site': 'cross-site',
        'referer': 'https://pixivic.com/popSearch',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36'
    }
    while True:
        # get_nowait + Empty avoids the empty()/get() race: with the old
        # check another worker could grab the last item between the two
        # calls and leave this thread blocked forever on get().
        try:
            img_url = url_queue.get_nowait()
            pic_name = picName_queue.get_nowait()
        except Empty:
            break
        # verify=False mirrors the original behavior (mirror host has a
        # mismatched certificate); timeout keeps a stalled download from
        # hanging the worker.
        res = requests.get(url=img_url, headers=headers, verify=False, timeout=60)
        with open(os.path.join(name, pic_name), "wb") as file:
            file.write(res.content)
        print("成功下载一张")
        time.sleep(1)  # be polite to the mirror

def mkdir(name):
    """Create the download folder `name`; terminate if it already exists.

    Exiting on an existing folder prevents clobbering a previous
    download run.  Raises SystemExit (via exit()) in that case.
    """
    try:
        # EAFP: makedirs + FileExistsError avoids the TOCTOU race of the
        # old exists()-then-makedirs() pair.
        os.makedirs(name)
    except FileExistsError:
        print(name + ' 文件夹已存在!')
        exit()
    print(name + ' 文件夹创建成功')

def _download_all(name):
    """Fan saveImg workers out over a 16-thread pool until the queues drain."""
    print("启动线程池...")
    with ThreadPoolExecutor(max_workers=16) as pool:
        pool.map(saveImg, [name] * 16)


def main():
    """Interactive entry point: ask what to fetch, queue the URLs, download.

    Sets the module-global `size` so the completion message printed after
    main() returns can report how many images were requested.
    """
    global size
    name = input("要创建文件夹的名字:")
    mkdir(name)
    memu = input('''
                1.搜索指定涩图
                2.排行榜
                请输入序号:
    ''')
    if memu == '1':
        word = input("涩图关键词:")
        size = input("要多少张涩图:")
        print("获取{}张涩图地址...".format(size))
        search_getImgUrl(word, size)
        _download_all(name)
    if memu == '2':
        size = input("要多少张涩图:")
        year = input('请输入年份(例:2020):')
        month = input('请输入月份(例:08):')
        day = input('请输入日期(例:01):')
        date = year + '-' + month + '-' + day
        rank = input('''
                    请选择要查询的榜单:
                    1.日榜
                    2.周榜
                    3.月榜
        ''')
        # Dispatch table replaces the copy-pasted if/elif branches; each
        # branch only differed in the API's `mode` value.
        modes = {'1': 'day', '2': 'week', '3': 'month'}
        mode = modes.get(rank)
        if mode is not None:
            print("获取{}张涩图地址...".format(size))
            rank_getImgUrl(date, mode, size)
            _download_all(name)


if __name__ == "__main__":
    # Shared work queues: the producer functions (search_getImgUrl /
    # rank_getImgUrl) fill them, the saveImg worker threads drain them.
    # They must exist at module level before main() runs.
    url_queue = Queue()
    picName_queue = Queue()
    main()
    # `size` is assigned inside main(); NOTE(review): if the user enters
    # an invalid menu choice it is never set and this line raises NameError.
    print("涩图下载完毕,共{}张,开冲!".format(size))