import base64
import datetime
import zlib
from urllib.parse import urlencode
import requests
import time
import pymysql
import json


# Base endpoint of Meituan's food "deal select" API; the query string is appended.
base_url = 'https://apimeishi.meituan.com/meishi/filter/v6/deal/select?'
# Desktop Chrome user-agent so requests look like a normal browser session.
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/75.0.3770.90 Safari/537.36',
}

# Meituan food-category id -> human-readable category name (Chinese).
# Keys are sent to the API as `cateId`; values are attached to scraped records.
meishi_dict = {
    '56':'江浙菜',
    '36': '小吃快餐',
    '11': '蛋糕甜点',
    '21329': '饮品店',
    '57': '粤菜',
    '17': '火锅',
    '20097': '生日蛋糕',
    '55': '川湘菜',
    '35': '西餐',
    '28': '日韩料理',
    '54': '烧烤烤串',
    '395': '聚餐宴请',
    '20059': '日本菜',
    '20004': '香锅烤鱼',
    '20638': '生鲜蔬果',
    '400': '中式烧烤/烤串',
    '21404': '特色菜'
}


def url_change(url):
    """SQL-escape every string in *url* for safe interpolation into a query.

    Args:
        url: iterable of strings to escape.

    Returns:
        list of escaped strings, in input order.
    """
    # `pymysql.escape_string` was removed from the top-level namespace in
    # PyMySQL 1.0; `pymysql.converters.escape_string` is its stable home and
    # exists in older releases too.
    return [pymysql.converters.escape_string(item) for item in url]


def get_offset(offset, cate_id):
    """Fetch one page of the Meituan deal-select listing as parsed JSON.

    Args:
        offset: pagination offset passed straight to the API (page size 25).
        cate_id: Meituan food-category id (a key of ``meishi_dict``).

    Returns:
        The decoded JSON response dict, or None on a non-200 status or a
        connection error.
    """
    print('正在爬取第：' + str(offset) + '页')
    params = {
        # Values must be plain text: urlencode() below performs the
        # percent-encoding itself. The previous pre-encoded values
        # ('%E5%BE%B7%E6%B8%85', '22.51595%2C113.3926') were encoded a
        # second time, sending a garbled cityName/mypos to the server.
        'cityName': '德清',
        'cateId': cate_id,
        'areaId': '-1',
        'sort': 'defaults',
        'limit': '25',
        'offset': offset,
        'utm_medium': 'WEIXINPROGRAM',
        'userId': '599391734',
        'uuid': '0acf022f3ed74348880d.1601366700.1.0.0',
        'token': encode_token(),
        'version': '9.6.0',
        'version_name': '9.6.0',
        'utm_term': '0',
        'utm_campaign': '0',
        'cityId': '467',
        'lat': '22.51595',
        'lng': '113.3926',
        'myLat': '22.51595',
        'myLng': '113.3926',
        'mypos': '22.51595,113.3926',
    }
    url = base_url + urlencode(params)
    try:
        # verify=False is needed for this endpoint; silence the resulting
        # InsecureRequestWarning so the log stays readable.
        requests.packages.urllib3.disable_warnings()
        response = requests.get(url=url, headers=headers, verify=False)
        if response.status_code == 200:
            print(url)
            return response.json()
        # Make the previously-silent implicit None visible in the log.
        print('Unexpected status', response.status_code)
    except requests.ConnectionError as e:
        print('Error', e.args)
    return None


def parse_offset(jsonfile, cateName):
    """Yield one store dict per POI entry in a deal-select response.

    Args:
        jsonfile: parsed JSON response from ``get_offset()``, or None when
            the request failed.
        cateName: human-readable category name attached to each record.

    Yields:
        dict with image URL, shop name, score, food category, address,
        average price, poi id and the supplied category name.
    """
    if not jsonfile:
        return
    # Defensive navigation: a partial/errored response previously raised
    # AttributeError ('data' or 'poiList' missing); now it yields nothing.
    data = jsonfile.get('data') or {}
    poi_list = data.get('poiList') or {}
    for item in poi_list.get('poiInfos') or []:
        yield {
            # frontImg carries resize arguments after '%'; keep the raw URL.
            'frontImg': (item.get('frontImg') or '').split('%')[0],
            'name': item.get('name'),
            # Guard against missing scores/prices instead of float(None).
            'avgScore': float(item.get('avgScore') or 0),
            'foodCateName': item.get('cateName'),
            'areaName': item.get('areaName'),
            'avgPrice': float(item.get('avgPrice') or 0),
            'poiId': item.get('poiid'),
            'cateName': cateName,
        }


def encode_token():
    """Build the `token` query parameter expected by the Meituan API.

    The token is the repr of a browser-fingerprint dict, zlib-compressed
    and base64-encoded, mirroring what the web front-end sends. The `ts`
    and `cts` fields embed the current wall-clock time in milliseconds.
    """
    now_ms = int(datetime.datetime.now().timestamp() * 1000)
    fingerprint = {
        'rId': 100900,
        'ver': '1.0.6',
        'ts': now_ms,
        # "client timestamp": 100 seconds ahead of ts.
        'cts': now_ms + 100 * 1000,
        'brVD': [1010, 750],
        'brR': [[1920, 1080], [1920, 1040], 24, 24],
        'bI': ['https://gz.meituan.com/meishi/c11/', ''],
        'mT': [],
        'kT': [],
        'aT': [],
        'tT': [],
        'aM': '',
        'sign': 'eJwdjktOwzAQhu/ShXeJ4zYNKpIXqKtKFTsOMLUn6Yj4ofG4UjkM10CsOE3vgWH36df/2gAjnLwdlAPBBsYoR3J/hYD28f3z+PpUnmJEPqYa5UWEm0mlLBRqOSaP1qjEtFB849VeRXJ51nr56AOSVIi9S0E3LlfSzhitMix/mQwsrdWa7aTyCjInDk1mKu9nvOHauCQWq2rB/8laqd3cX+adv0zdzm3nbjTOdzCi69A/HQAHOOyHafMLmEtKXg=='
    }
    # repr -> bytes -> zlib deflate -> base64, returned as utf-8 text.
    return base64.b64encode(zlib.compress(str(fingerprint).encode())).decode('utf-8')


if __name__ == '__main__':
    # Append mode so repeated runs accumulate records; one JSON object per
    # line. `with` + explicit encoding replaces the leaked, default-encoded
    # file handle of the original.
    with open('jsonFile.json', 'a', encoding='utf-8') as file_object:
        for offset in range(0, 160):
            # NOTE(review): only offsets that are multiples of 5 are ever
            # scraped — the sleep gate also wraps the whole fetch loop.
            # Preserved as-is, but confirm this was intended and not a
            # misplaced "sleep every 5 pages" indent.
            if offset % 5 == 0:
                time.sleep(2)
                print('暂时休眠一波')
                for key in meishi_dict:
                    jsonfile = get_offset(offset, key)
                    for result in parse_offset(jsonfile, meishi_dict[key]):
                        print(result)
                        try:
                            file_object.write(json.dumps(result, ensure_ascii=False) + '\n')
                            file_object.flush()
                        except (TypeError, ValueError, OSError) as e:
                            # Report failed records instead of the original
                            # bare `except: pass` that hid every error.
                            print('write failed:', e)