import re
import time
from scrapy_redis.spiders import RedisSpider
from scrapy import Request
from ..settings import REDIS_HOST, REDIS_PORT, REDIS_PASSWORD, CRAWL_ALL
from ..items import TourCommentCrawlerItem


class BaseSpider(RedisSpider):
    """Base spider for crawling tour comments from an OTA site via scrapy-redis.

    Subclasses must override the ``*_xpath`` class attributes (and usually
    ``create_date_pattern`` and ``get_page_size``) to describe how comments
    are located on a concrete site. ``self.name`` (the spider name) is
    recorded as the OTA identifier on each item.
    """

    # XPath selecting one selector node per comment; must be overridden.
    datas_xpath = None
    # XPaths evaluated relative to a single comment node; must be overridden.
    creator_xpath = None
    create_date_xpath = None
    content_xpath = None
    star_xpath = None
    # strptime pattern used to parse a comment's creation date.
    create_date_pattern = "%Y-%m-%d"
    # When truthy, the first parsed page fans out requests to all other pages.
    crawl_all = CRAWL_ALL

    custom_settings = {
        'LOG_LEVEL': 'DEBUG',
        'DOWNLOAD_DELAY': 5,

        # Redis connection parameters for scrapy-redis.
        'REDIS_HOST': REDIS_HOST,
        'REDIS_PORT': REDIS_PORT,

        # Redis password (passed through as connection kwargs).
        'REDIS_PARAMS': {
            'password': REDIS_PASSWORD,
        },
    }

    def parse(self, response):
        """Yield one TourCommentCrawlerItem per comment on the page, then
        (on the first pass only) schedule requests for the remaining pages.

        Raises IndexError if the request URL lacks one of the mandatory
        ``area``/``category``/``tour_id`` query parameters.
        """
        for data in self.get_datas(response):
            item = TourCommentCrawlerItem()
            item['creator'] = self.get_creator(data)
            item['create_date'] = self.get_create_date(data)
            item['content'] = self.get_content(data)
            item['star'] = self.get_star(data)
            # Raw strings so `\d` is a regex escape, not a (now-invalid)
            # Python string escape.
            item['area'] = re.findall(r"area=([a-z]+)", response.url)[0]
            item['category'] = re.findall(r"category=([a-z]+)", response.url)[0]
            item['tour_id'] = re.findall(r"tour_id=(\d+)", response.url)[0]
            item['score'] = self.__get_score(item['create_date'])
            item['ota'] = self.name
            yield item
        if self.crawl_all:
            for url in self.get_urls(response):
                yield Request(url, callback=self.parse)
            # Fan out only once; the scheduled pages are parsed individually.
            self.crawl_all = False

    def get_datas(self, response):
        """Return the selector list of comment nodes found on *response*.

        Raises Exception if ``datas_xpath`` was not overridden.
        """
        if self.datas_xpath is None:
            raise Exception("获取datas方法需重写:{}".format(self.name))
        return response.xpath(self.datas_xpath)

    def get_creator(self, data):
        """Extract the comment author's name from a comment node."""
        return self.__get_field(self.creator_xpath, data)

    def get_create_date(self, data):
        """Extract the comment creation-date string from a comment node."""
        return self.__get_field(self.create_date_xpath, data)

    def get_content(self, data):
        """Extract the comment body text from a comment node."""
        return self.__get_field(self.content_xpath, data)

    def get_star(self, data):
        """Extract the star rating; returns "" when the rating node is absent."""
        try:
            return self.__get_field(self.star_xpath, data)
        except IndexError:
            return ""

    @staticmethod
    def __get_field(path, data):
        """Extract and whitespace-strip the first value matched by *path*.

        Raises Exception if *path* is None (subclass forgot to override),
        and IndexError if the xpath matches nothing.
        """
        if path is None:
            raise Exception("方法未定义")
        # BUG FIX: Python str has no trim(); the original `.trim()` raised
        # AttributeError on every extraction. strip() is the intended call.
        return data.xpath(path).extract()[0].strip()

    def __get_score(self, create_date):
        """Convert a creation date into a sort score.

        The epoch timestamp is negated, so newer comments get smaller
        (more negative) scores — presumably for an ascending-score index.
        """
        parsed = time.strptime(create_date, self.create_date_pattern)
        return -1 * time.mktime(parsed)

    def get_urls(self, response):
        """Build URLs for pages 2..page_size by swapping the value after the
        final '=' in the current URL for the page number."""
        # Hoist the loop-invariant URL prefix out of the loop.
        prefix = "=".join(response.url.split("=")[:-1])
        page_size = self.get_page_size(response)
        return [prefix + "=" + str(i) for i in range(2, page_size + 1)]

    def get_page_size(self, response):
        """Total number of comment pages; subclasses should override.

        The default of 1 means no extra pages are scheduled.
        """
        return 1
