# -*- coding: utf-8 -*-
import json
import re

import scrapy
from scrapy import Request
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from scrapy.xlib.pydispatch import  dispatcher
from scrapy import signals

from scrapy_selenium_test.items import ScrapySeleniumTestItem


class TestSpider(scrapy.Spider):
    """Crawl Meituan Shenzhen food-listing pages and yield one item per
    restaurant, parsed from the ``poiLists`` JSON blob embedded in each page.

    NOTE(review): a Selenium Chrome browser is created in ``__init__`` but no
    visible code here drives it — presumably a downloader middleware elsewhere
    in the project uses ``self.browser``; confirm before removing it.
    """
    name = 'test'
    # Bug fix: was ['jd.com'], but every request targets meituan.com —
    # OffsiteMiddleware would silently drop the crawl's requests.
    allowed_domains = ['meituan.com']
    start_urls = ['https://sz.meituan.com/']
    # Page template for pages 2..67; page 1 has no /pnN/ suffix.
    url_init = 'https://sz.meituan.com/meishi/pn{}/'

    def __init__(self, *args, **kwargs):
        # Forward spider arguments so `scrapy crawl test -a key=value` works.
        super(TestSpider, self).__init__(*args, **kwargs)
        # Strip the "controlled by automated software" switch so server-side
        # Selenium detection is less likely to trigger.
        self.options = ChromeOptions()
        self.options.add_experimental_option('excludeSwitches', ['enable-automation'])
        self.browser = webdriver.Chrome(options=self.options)

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Build the spider and hook ``spider_closed`` through the crawler's
        signal manager.

        ``scrapy.xlib.pydispatch`` was removed in Scrapy 1.7; connecting via
        ``crawler.signals`` is the supported replacement for
        ``dispatcher.connect``.
        """
        spider = super(TestSpider, cls).from_crawler(crawler, *args, **kwargs)
        crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
        return spider

    def spider_closed(self):
        """Quit the Selenium browser when the spider shuts down, so no
        orphaned chromedriver/Chrome processes are left behind."""
        self.logger.info("spider closed, quitting browser")
        self.browser.quit()

    def start_requests(self):
        """Yield one request per listing page (1..67).

        Page 1 lives at /meishi/ directly; later pages use the /pnN/ form.
        """
        for num in range(1, 68):
            if num == 1:
                url = 'https://sz.meituan.com/meishi/'
            else:
                url = self.url_init.format(num)
            yield Request(url, callback=self.parse)

    def parse(self, response):
        """Extract the embedded ``poiLists`` JSON and yield one populated
        ``ScrapySeleniumTestItem`` per restaurant.

        On any parse failure the page URL is appended to
        ``request_error_url.txt`` so it can be retried later.
        """
        try:
            match = re.search(r'\"poiLists\":(.*),\"comHeader\"', response.text)
            # match is None when the page layout changed or we were blocked;
            # the resulting AttributeError is handled below.
            meishi_list_dict = json.loads(match.group(1))
            for line in meishi_list_dict['poiInfos']:
                # Bug fix: build a fresh item per restaurant instead of
                # mutating and re-yielding one shared instance.
                item = ScrapySeleniumTestItem()
                item['poiId'] = line['poiId']
                item['title'] = line['title']
                item['avgScore'] = line['avgScore']
                item['allCommentNum'] = line['allCommentNum']
                item['address'] = line['address']
                item['avgPrice'] = line['avgPrice']
                # Bug fix: was copy-pasted from poiId; read the hasAds field.
                item['hasAds'] = line['hasAds']
                item['adsClickUrl'] = line['adsClickUrl']
                item['adsShowUrl'] = line['adsShowUrl']
                item['frontImg'] = line['frontImg']
                yield item
        except (AttributeError, KeyError, ValueError) as exc:
            # AttributeError: regex found nothing; ValueError covers
            # json.JSONDecodeError. Narrowed from a bare except, which also
            # swallowed KeyboardInterrupt/SystemExit.
            self.logger.warning("failed to parse %s: %s", response.url, exc)
            with open("request_error_url.txt", "a", encoding="utf-8") as f:
                f.write(response.url + "\n")
