#!/usr/bin/env python3
from hashlib import blake2b
import rocksdb
from os.path import join,abspath,dirname
from datetime import date
from json import loads
from proxy import PROXY, proxy_header
from time import sleep
from tzutil.ob import Ob
from urllib.parse import urlencode
import csv
import pathlib
import traceback

from requests import Session


# Logged-in ele.me session cookie, pasted verbatim from a browser session
# (dated 2019-12-13).  Requests will start failing once it expires.
cookie = "ut_ubt_ssid=p9988xtx4jyel916d4m36zkqc548s4ul_2019-12-13; ubt_ssid=r2an235akpebst5mlj6j299im7n9zg8n_2019-12-13; _utrace=b8eecba4bc5ebd5774f04bc2b69eafd9_2019-12-13; cna=wgU3Fcm/jEUCAVNhcQ5fsL8G; cookie2=14a6eff4712ea5935e632fde6342ba9b; t=f6d4870b02335e0df6b677889002964b; _tb_token_=e4e357b14e6d3; track_id=1576204913|7ee00326311617a603c212866cbd0c1902da6badb3d527c75a|4220fa42e3cc17906cdc0ae002d70dc3; ZDS=1.0|1576204913|dz9RtFfNS/RRm8X74kj0+0NL2yNI8CSLN9uaov0iNStjrvpBpnNd2fx6xMNjYD+m; tzyy=b9dc43fd17141b4b6c6554224edc2968; USERID=15630888; UTUSER=15630888; unb=2204225480585; csg=8c290399; t_eleuc4=id4=0%40BA%2FvuHCrrRkY7EIeGh5ycoss6rp%2FftPMFS4fSw%3D%3D; SID=AV0FEdAzWYXkmsXHPMIGqwQUX7lRoSE5a11Q29; x5check_ele=jMCxCdu0TB9z2N7B1U4NAg%3D%3D; l=dB_8muLHqDnVOk16BOCwnurza77OtIRAguPzaNbMi_5dM_KRYS7OkFws2Ep6cjWftWTB4NMB59w9-etXsGUX_F6dmW2LnxDc.; isg=BNLSg3lP25SY_idf_8XkcBiAI56049Z9zEMc25wrswVwr3KphHPojcbBH0s2304V"


# Directory containing this script; cache and CSV output live underneath it.
ROOT = dirname(abspath(__file__))

# Persistent on-disk cache of API responses (see get(): keyed by blake2b(url)).
CACHE = rocksdb.DB(
    join(ROOT,"cache"),
    rocksdb.Options(create_if_missing=True)
)


# Shared HTTP session; the initial GET warms up the connection and lets the
# site set any baseline cookies before the crawl starts.
session = Session()
session.get("https://h5.ele.me")

class Dump:
    """Deduplicating CSV writer for the restaurants of one city.

    Creates ``csv/<today>/<city>.csv`` under ROOT and writes one row per
    unique restaurant id.  The open file handle is exposed as ``self.file``
    so the caller (Fetch) can close it when the crawl finishes.
    """

    def __init__(self, city):
        self.exist = set()  # restaurant ids already written (dedup)
        outdir = join(ROOT, "csv", str(date.today()))
        pathlib.Path(outdir).mkdir(parents=True, exist_ok=True)
        # newline="" is required by the csv module so it controls line
        # endings itself (prevents blank rows on Windows); utf-8-sig adds
        # a BOM so Excel opens the Chinese headers correctly.
        self.file = open(
            join(outdir, city + ".csv"), "w",
            encoding="utf-8-sig", newline=""
        )
        self.csv = csv.writer(self.file)
        # Header: id, lng/lat, monthly sales, shop name.
        self.csv.writerow(
            [
                "编号",
                "经纬度",
                "月销量",
                "店名",
            ]
        )

    def __call__(self, o):
        """Write one restaurant row.

        Returns True when a new row was written, False when a row with
        the same restaurant id was already written before.
        """
        r = o.restaurant
        name = r.name
        rid = r.id  # renamed from ``id`` to avoid shadowing the builtin
        if rid in self.exist:
            print(name, "已存在")
            return False
        self.exist.add(rid)
        self.csv.writerow((
            rid,
            "%s %s" % (r.longitude, r.latitude),
            # monthly sales display string, e.g. "月售100" -> "100"
            loads(r.business_info)['recent_order_num_display'].replace('月售', ''),
            name,
        ))
        return True

#PROXY = 'socks5://127.0.0.1:1080'

def get(url, headers, verify):
    """Fetch ``url`` as JSON through the proxy, with an on-disk cache.

    A cached response (keyed by blake2b(url)) is returned immediately.
    Otherwise the request is retried forever: each attempt sleeps 10s
    (crude rate limiting), goes through PROXY, and logs any exception.
    The parsed JSON is stored in CACHE only when ``verify(json)`` is
    truthy, but it is returned to the caller either way.
    """
    cache_key = blake2b(url.encode('utf-8', 'ignore')).digest()
    cached = CACHE.get(cache_key)
    if cached:
        return loads(cached)

    headers.update(proxy_header())
    proxies = {"http": PROXY, "https": PROXY}
    while True:
        try:
            sleep(10)
            resp = session.get(
                url,
                timeout=60,
                verify=False,  # TLS verification off: traffic goes via proxy
                proxies=proxies,
                headers=headers,
            )
            payload = resp.json()
            if verify(payload):
                CACHE.put(cache_key, resp.content)
            return payload
        except Exception:
            # Best-effort crawl: log and retry indefinitely.
            traceback.print_exc()

class Fetch:
    """Crawl every grid point of one city and dump restaurants to CSV.

    Keeps a work list ``self.li`` of (lnglat, offset, rank_id) tasks;
    each fetched page may push a follow-up task for the next offset.
    """

    def __init__(self, city):
        # One task per grid point, starting at offset 0 with no rank_id.
        self.li = [(point, 0, "") for point in lnglat(city)]
        self.dump = Dump(city)

    def __call__(self):
        step = 1
        while self.li:
            print(step)
            step += 1
            task = self.li.pop()
            self.fetch(*task)
        self.dump.file.close()

    def fetch(self, lnglat, offset, rank_id=""):
        """Fetch one result page around ``lnglat`` and, while the API
        reports more results, queue the next page as a new task."""
        limit = 30
        lnglat_q = f"latitude={round(lnglat[1],6)}&longitude={round(lnglat[0],6)}"
        query = f"{lnglat_q}&offset={offset}&limit={limit}"
        url = f"https://h5.ele.me/restapi/shopping/v3/restaurants?{query}&extras[]=activities&extras[]=tags&extra_filters=home&order_by=5&rank_id={rank_id}&terminal=h5"
        print(url)

        headers = {
            "cookie": cookie,
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.3",
            "referer": f"https://www.ele.me/place/wx4g4hm1fv0q/search/%25E5%25B0%258F%25E6%2581%2592%25E6%25B0%25B4%25E9%25A5%25BA?{lnglat_q}",
        }

        # Only cache responses that actually look like a result page.
        page = get(url, headers, lambda body: 'has_next' in body)
        r = Ob(**page)

        written = 0
        for item in r.items:
            if self.dump(Ob(**item)):
                written += 1

        if 'has_next' not in r:
            print(r)

        if r.has_next:
            next_task = (lnglat, offset + limit, r.meta.rank_id)
            if written >= 3:
                # Productive page: push to the end of the stack, so the
                # next pop() continues this grid point (depth-first).
                self.li.append(next_task)
            else:
                # Mostly duplicates: push to the front, so it is only
                # revisited after everything else.
                self.li.insert(0, next_task)

# Cities to crawl, one name per line; split("\n") turns the literal into a
# list of city names fed to Fetch below.
CITY="""北京
上海
深圳
广州
重庆
天津
成都
武汉
杭州
南京
青岛
长沙
宁波
郑州
西安
福州
济南
合肥
大连
长春
哈尔滨
沈阳
石家庄
南昌
昆明
厦门
南宁
贵阳
太原
乌鲁木齐
呼和浩特
兰州
银川
海口
西宁
拉萨""".split("\n")

if __name__ == "__main__":
    #import single_process.init
    # NOTE(review): ``lnglat`` is only imported here, yet Fetch.__init__
    # resolves it as a global — the crawler works only when this file is
    # run as a script (importing the module and calling Fetch would fail).
    from lnglat import lnglat
    # Crawl each city sequentially; Fetch(city)() runs until the city's
    # work list is exhausted and closes its CSV file.
    for city in CITY:
        Fetch(city)()
