# -*- coding: utf-8 -*- 
# @Time : 2021/4/9 21:23 
# @Author : Dong 
# @File : fresh_snaps.py
import json
import os
import re
import time
from multiprocessing import Pool

import requests
from urllib.parse import urlencode
# Directory where downloaded snap images are written.
_workspace = r'G:\fresh_snap'
# Module-level counter used by save_photo() to build sequential filenames;
# starting at 1056 presumably continues a previous crawl run -- TODO confirm.
num = 1056
# Browser-like request headers (including a captured session cookie) so the
# droptokyo API answers as it would to the site's own XHR requests.
headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'en,zh-CN;q=0.9,zh;q=0.8',
    'Connection': 'keep-alive',
    'Cookie': 'Hm_lvt_d24dcf008a97469875a4da33090711f9=1617974215; Hm_lpvt_d24dcf008a97469875a4da33090711f9=1617974215; __utma=53386493.670124061.1617974211.1617974211.1617974211.1; __utmc=53386493; __utmz=53386493.1617974211.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); __utmt=1; __utmb=53386493.1.10.1617974211; _fbp=fb.1.1617974217681.1200694037',
    'Host': 'droptokyo.com',
    # 'Referer': 'https://droptokyo.com/freshsnaps/?page_num=4',
    'sec-ch-ua': '"Google Chrome";v="89", "Chromium";v="89", ";Not A Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36',
    'X-Requested-With': 'XMLHttpRequest',
}

def get_one_page(num):
    """Fetch one listing page of fresh snaps and process each entry.

    Queries the droptokyo JSON API for page *num*, extracts the numeric id
    from each entry's detail URL and hands it to parse_one_detail().
    Returns None (also on any network/JSON failure -- best-effort crawl).
    """
    params = {
        'type': 'freshsnaps',
        'page_num': num
    }
    url = 'https://droptokyo.com/api/?' + urlencode(params)
    time.sleep(1)  # throttle: be polite to the server between requests
    try:
        response = requests.get(url, headers=headers, timeout=20)
        payload = response.json()
        for snap in payload.get('freshsnaps'):
            detail_url = snap.get('url')
            print(detail_url)
            # The first run of digits in the detail URL is the snap id.
            # (Raw string avoids the invalid-escape warning of '(\d+)'.)
            match = re.search(r'\d+', detail_url)
            if match is None:
                # No id in this URL: skip the entry instead of letting
                # .group() raise and abort the whole page.
                continue
            parse_one_detail(match.group())
    except Exception:
        # Best-effort: swallow network/JSON errors and move on to the
        # next page rather than crashing the crawl.
        return None


def parse_one_detail(par_mess):
    """Fetch one snap's detail record and save every image listed in it.

    par_mess: numeric snap id (string) extracted from the listing URL.
    Looks up the 'image' list inside the 'freshsnaps' payload and passes
    each entry's image URL to save_photo(). Returns None (also on any
    network/JSON failure -- best-effort crawl).
    """
    params = {
        'type': 'freshsnap',
        'id': par_mess
    }
    time.sleep(1)  # throttle requests
    fresh = 'https://droptokyo.com/api/?%s' % urlencode(params)
    try:
        payload = requests.get(fresh, timeout=20).json()
        # Only the 'image' entry matters -- look it up directly instead of
        # scanning every key/value pair of the dict.
        for item in payload.get('freshsnaps', {}).get('image', []):
            save_photo(item.get('image'))
    except Exception:
        # Best-effort: skip this snap on any error and keep crawling.
        return None

def save_photo(image_url):
    """Download image_url into _workspace as '<num>.<ext>'.

    Uses (and increments) the module-level counter ``num`` to build a
    sequential filename; the extension is whatever follows the last '.'
    in the URL (assumes no query string -- TODO confirm). Prints a
    success/failure message; returns None on failure.
    """
    global num
    time.sleep(1)  # throttle downloads
    try:
        img = requests.get(image_url, timeout=20)
        suffix = image_url.split('.')[-1].strip()
        mess = '%s.%s' % (num, suffix)
        work_path = os.path.join(_workspace, mess)
        # Context manager guarantees the handle is closed even when the
        # write raises (the original leaked the file object on error).
        with open(work_path, 'ab') as f:
            f.write(img.content)
        print(image_url, '图片保存成功！')
        num += 1
    except Exception:
        print(image_url, '图片保存fail！')
        return None

def main(num):
    """Crawl one listing page: thin entry point delegating to get_one_page()."""
    return get_one_page(num)

if __name__ == '__main__':
    # Sequential crawl of listing pages 17..24.
    # Progress note from the original author: crawled up to page 17.
    for i in range(17, 25):
        print(i)
        main(i)
    # NOTE(review): the original built an unused Pool(processes=10) here
    # (its only consumer, pool.map, was commented out), spawning ten idle
    # worker processes for nothing -- removed. Restore with
    #     Pool(processes=10).map(main, range(2, 10))
    # if parallel crawling is wanted again.