# -*- coding: utf-8 -*-
import json
import re
import time

import requests
import scrapy
from scrapy import Selector

from scrapy import Request
from fake_useragent import UserAgent

from shop.items import ShopItem
from shop.settings import PROXY_CACHE
from shop.spiders.utlis import json_html


class MeituanSpider(scrapy.Spider):
    """Crawl Meituan (Chengdu) food listings.

    Flow: area index page -> one request per sub-area -> paginated shop
    listings -> per-shop detail pages, yielded as ``ShopItem``.

    The site redirects suspected bots to a verification page; each parse
    method detects that via ``meta['redirect_urls']`` and retries the
    original URL after a short pause.
    """

    name = 'meituan'
    start_urls = ['https://cd.meituan.com/meishi/']

    def _retry_redirect(self, response, callback):
        """Build a retry request for a response that was anti-bot redirected.

        ``redirect_urls[0]`` is the URL we originally asked for; re-request
        it with the given callback.

        NOTE(review): ``time.sleep`` blocks the whole Twisted reactor —
        consider DOWNLOAD_DELAY / AutoThrottle instead of sleeping here.
        """
        original_url = response.meta['redirect_urls'][0]
        print('=' * 100)
        print(original_url)
        print(response.url)
        time.sleep(5)
        # dont_filter=True is required: the scheduler has already seen this
        # URL, so without it the dupefilter silently drops the retry.
        return Request(url=original_url, callback=callback, dont_filter=True)

    def parse(self, response):
        """Extract sub-area links from the page's embedded JSON and schedule
        one listing request per sub-area, skipping the '全部' (all) entry."""
        page_data = json_html(response)
        for area in page_data['filters']['areas']:
            for sub_area in area['subAreas']:
                print(sub_area['name'])
                if sub_area['name'] != '全部':
                    yield Request(sub_area['url'], callback=self.city_parse)

    def city_parse(self, response):
        """Read the total shop count for this area and schedule every
        pagination page of the listing."""
        if response.meta.get('redirect_urls'):
            # Fix: retry through city_parse (was shop_url_parse), so the
            # pagination logic below still runs once the retry succeeds.
            yield self._retry_redirect(response, self.city_parse)
        else:
            data = json_html(response)
            total = data['poiLists']['totalCounts']
            pages = (total - 1) // 15 + 1  # 15 shops per listing page
            base = response.url.rstrip('/')
            for num in range(1, pages + 1):
                # Fix: paginate relative to the area page that produced the
                # count, not the hard-coded city-wide listing URL.
                yield Request(url=f'{base}/pn{num}/',
                              callback=self.shop_url_parse)

    def shop_url_parse(self, response):
        """Scrape poiIds out of a listing page and schedule one detail
        request per unique shop."""
        if response.meta.get('redirect_urls'):
            yield self._retry_redirect(response, self.shop_url_parse)
        else:
            # Fix: use the body Scrapy already downloaded instead of
            # re-fetching with blocking requests.get (which stalled the
            # reactor and doubled the traffic to the site).
            poi_ids = re.findall(r'poiId.*?(\d+)', response.text, re.S)
            # dict.fromkeys de-duplicates while preserving first-seen order.
            for pid in dict.fromkeys(poi_ids):
                yield Request(url=f'https://cd.meituan.com/meishi/{pid}/',
                              callback=self.shop_parse)

    def shop_parse(self, response):
        """Build a ShopItem from the detail page's embedded JSON."""
        if response.meta.get('redirect_urls'):
            yield self._retry_redirect(response, self.shop_parse)
        else:
            data = json_html(response)
            info = data['detailInfo']
            item = ShopItem()
            item['name'] = info['name']
            item['score'] = info['avgScore']
            item['tel'] = info['phone']
            item['address'] = info['address']
            item['time'] = info['openTime']
            item['avg_price'] = info['avgPrice']
            # NOTE(review): any non-empty extraInfos is treated as "has
            # WiFi" — original heuristic preserved; confirm against the
            # detailInfo schema before trusting this field.
            item['is_wifi'] = 1 if info['extraInfos'] else 0
            item['longitude'] = info['longitude']
            item['latitude'] = info['latitude']
            item['picture'] = data['photos']['frontImgUrl']
            yield item
