# Example POI map URL:  https://place.qyer.com/poi/map/V2UJYVFvBzBTbFI2/
# Example POI page link: href="//place.qyer.com/poi/V2UJYVFvBzBTbFI2/"

import json
import logging

import scrapy

import crawler.db as db
from crawler.items import *


class PositionSpider(scrapy.Spider):
    """Backfill missing lat/lng coordinates for spots via qyer.com POI map pages.

    Reads batches of spots whose coordinates are NULL from the database,
    requests each spot's map page, extracts the coordinates from an inline
    <script> tag, and yields ``PositionItem`` objects for the pipeline.
    """

    name = 'PositionSpider'
    allowed_domains = ['place.qyer.com']
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',
        'referer': 'https://place.qyer.com/shanghai/alltravel/'
    }
    custom_settings = {
        'ITEM_PIPELINES': {'crawler.pipelines.PositionPipeline': 300}
    }
    # Number of rows fetched per batch.
    size = 10
    # 1-based offset of the next batch to read.
    start = 1
    # Requests from the current batch still awaiting a parsed response.
    remained = 0
    # True once a query returns no rows, i.e. all spots have been processed.
    finished = False

    def __init__(self, *args, **kwargs):
        # Initialize the Spider base class so Scrapy bookkeeping
        # (crawler, settings, logger) is set up correctly.
        super().__init__(*args, **kwargs)
        # Batch query template; %d is filled with the 0-based offset in
        # start_requests(). Use standard `OR` instead of `||`: under MySQL's
        # PIPES_AS_CONCAT sql_mode `||` means string concatenation and would
        # silently corrupt the WHERE clause.
        self.sql = (
            f"select `spot_id`, `spot_md5` from `{db.name}`.`spot` "
            f"where `lat` is NULL OR `lng` is NULL limit %d, {self.size};"
        )

    def start_requests(self):
        """Fetch the next batch of coordinate-less spots and request their map pages.

        Sets ``finished`` when the query comes back empty; otherwise schedules
        one request per row and advances the pagination cursor.
        """
        # Offsets are 0-based in SQL; `start` is kept 1-based.
        sql = self.sql % (self.start - 1)
        db.cursor.execute(sql)
        results = db.cursor.fetchall()
        length = len(results)
        logging.info(f'查询结果长度{length}')
        if length == 0:
            # No rows left: stop scheduling further batches.
            self.finished = True
            return
        for each in results:
            try:
                # each[0] = spot_id, each[1] = spot_md5 (URL token).
                meta = {'spot_id': each[0]}
                url = f'https://place.qyer.com/poi/map/{each[1]}/'
                yield scrapy.Request(url=url, headers=self.headers, meta=meta)
            except Exception as e:
                # Log the bad row and continue with the rest of the batch.
                logging.error(f"数据查询出错,each={each},\n error={repr(e)}")
        self.remained = length
        self.start = self.start + length

    def failed(self, response):
        """Log a response that could not be parsed into coordinates."""
        logging.warning(f"url={response.url}, status={response.status}")

    def parse(self, response):
        """Extract lat/lng from the map page and yield a PositionItem.

        Always counts the response against the current batch and schedules
        the next batch once every in-flight request has been answered.
        """
        # Coordinates are embedded in an inline <script> as a JSON fragment.
        data = response.css('script').re(r'"lat":"\d+\.\d+","lng":"\d+\.\d+"')
        if not data:
            logging.warning("没有找到经纬度坐标")
            self.failed(response)
        else:
            try:
                # Wrap the fragment in braces to form a valid JSON object.
                obj = json.loads('{%s}' % data[0])
                item = PositionItem()
                item['spot_id'] = response.meta['spot_id']
                item['lat'] = obj['lat']
                item['lng'] = obj['lng']
                logging.info(f"spot_id={item['spot_id'] }, lat={obj['lat']}, lng={obj['lng']}")
                yield item
            except Exception as e:
                logging.error(f"json解析出错！,data={data} \n error={repr(e)}")
                self.failed(response)

        # Decrement even when no coordinates were found: the original early
        # `return` skipped this, so a coordinate-less page in the last
        # in-flight batch stalled pagination forever.
        self.remained = self.remained - 1
        if self.remained <= 0 and not self.finished:
            yield from self.start_requests()
