import json
import re
from copy import deepcopy

import scrapy
from bs4 import BeautifulSoup
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


class DzdpSpider(scrapy.Spider):
    """Mobile dianping.com spider.

    Crawl flow:
      1. ``start_requests`` enumerates the alphabetical city-list pages.
      2. ``parse`` extracts every city link and requests the site-wide
         category page once per city (city identity travels in ``meta``).
      3. ``parse_city_cate_info`` parses the category tree embedded in the
         page's inline JSON and schedules one listing request per category.
      4. ``parse_city_foods_list`` is the (still unfinished) shop-list parser.

    A headless Chrome instance is created in ``__init__`` for pages that
    need JavaScript rendering and torn down in ``close``.
    """

    name = 'dzdp'
    allowed_domains = ['dianping.com']
    start_urls = ['https://m.dianping.com']

    def __init__(self):
        super().__init__()
        from selenium import webdriver

        # Headless mode: no visible browser window.
        options = webdriver.ChromeOptions()
        options.add_argument("--headless")
        self.driver = webdriver.Chrome(options=options)

        # Narrow viewport so the responsive site serves its mobile layout.
        self.driver.set_window_size(450, 1080)

    def start_requests(self):
        # The city list is split alphabetically by the city's first letter.
        for letter in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ':
            url = self.start_urls[0] + "/citylist?c={}".format(letter)
            yield scrapy.Request(
                url,
                callback=self.parse,
                headers={
                    'Content-Type': 'application/json; charset=utf-8',
                    'Referer': 'https://m.dianping.com/citylist'}
            )

    def close(self, reason):
        """Scrapy shutdown hook: tear down the WebDriver session.

        ``quit()`` (not ``close()``) ends the whole session and terminates
        the chromedriver process; ``close()`` would only close the current
        window and leak the browser process.
        """
        self.driver.quit()

    def parse(self, response):
        """Extract each city link from a city-list page and request the
        site-wide category page once per city."""
        for link in response.xpath("//ul[@class='J_citylist']/li/a"):
            dp = {"city_name": link.xpath("./text()").extract_first()}
            city_url = "https:" + link.xpath("./@href").extract_first()
            yield scrapy.Request(
                self.start_urls[0] + "/allcategory",
                callback=self.parse_city_cate_info,
                # deepcopy: each scheduled request must carry its own dict,
                # not a shared reference mutated by later iterations.
                meta={"dp": deepcopy(dp)},
                # The Referer identifies which city this request is for.
                headers={
                    'Referer': city_url
                }
            )

    def parse_city_cate_info(self, response):
        """Parse the category tree embedded in the page's inline JSON and
        schedule one shop-list request per (city, category) pair."""
        dp = response.meta["dp"]
        # The category data is an inline JSON fragment sitting between the
        # '"_isInit":true}' marker and the '"footer"' key.
        fragments = re.findall(r'"_isInit":true},(.*?),"footer"', response.text)
        if not fragments:
            # Page layout changed or the request was blocked; skip instead
            # of crashing the callback with an IndexError.
            self.logger.warning("category JSON not found on %s", response.url)
            return
        big_cates_list = json.loads("{" + fragments[0] + "}")["list"]["data"]["list"]

        for big_cate in big_cates_list:
            dp["big_category_name"] = big_cate["name"]
            dp["big_category_en_name"] = big_cate["enName"]
            dp["big_category_id"] = big_cate["id"]

            for cate in big_cate["list"]:
                # Skip the "all" pseudo-category to avoid duplicate listings.
                if cate["name"] == "全部":
                    continue
                dp["category_name"] = cate["name"]
                yield scrapy.Request(
                    self.start_urls[0] + cate["url"],
                    callback=self.parse_city_foods_list,
                    meta={"dp": deepcopy(dp)},
                    headers={
                        'Referer': response.url,
                        'Host': 'm.dianping.com',
                    }
                )

    def parse_city_foods_list(self, response):
        # TODO: shop-list extraction is not implemented yet. The page appears
        # to embed its data in `window.PAGE_INITIAL_STATE` — presumably that
        # blob should be located and json-decoded here; verify against a live
        # page before implementing.
        dp = response.meta["dp"]