import time
from typing import Optional

from scrapy import Request, FormRequest

from crawler.logger import monitor_logger
from crawler.scrapy_redis_custom.spiders import RedisSpider, RedisCrawlSpider

__all__ = ['BaseSpider', 'BaseCrawlSpider']



class BaseMixin:
    """Mixin that turns a task dict into one or more scrapy Requests.

    Subclasses customise behaviour by overriding the ``default_*`` class
    attributes and/or the ``_get_request_*`` / ``_before_task`` hook
    methods.  The entry point is :meth:`start_task`, which yields one
    Request (or FormRequest) per URL produced by :meth:`_get_request_url`.
    """

    # Default origin URL template; may contain %(keyword)s-style
    # placeholders filled in by _get_request_url.
    default_origin_url: str = ''
    # Default HTTP method.
    default_origin_method: str = 'GET'
    # Request type: 'FormRequest' selects scrapy.FormRequest, anything
    # else falls back to a plain scrapy.Request.
    default_origin_request_type: str = ''
    # Default request headers (None -> scrapy defaults).
    default_origin_header: Optional[dict] = None
    # Default cookiejar id, i.e. whether cookies are carried between
    # requests (None -> no dedicated cookiejar).
    default_origin_cookiejar: Optional[int] = None
    # Default cookies.
    default_origin_cookie: Optional[dict] = None
    # Default encoding.
    default_origin_encoding: str = 'utf-8'
    # Proxy type to use:
    #   local    - own/local IP; some sites ban it easily
    #   short    - short-lived proxy, rotated every 1-2 minutes,
    #              same IP in between
    #   realtime - proxy rotated in real time
    default_proxy_type: str = 'local'
    # Whether proxy demotion is allowed (demoted by default to save IPs).
    default_proxy_demote: bool = True

    def _get_request_url(self, task):
        """Build the request URL for *task*.

        Fills the %(keyword)s / %(trackingNo)s / %(seed_val)s / %(time)s /
        %(timestamp)s placeholders in the task's ``originUrl`` (falling
        back to ``default_origin_url``).  Subclasses may override this to
        return a list of URLs; :meth:`start_task` accepts both forms.
        """
        seed_val = self._get_query_seed_val(task.get('trackingNo'))
        # `or` also covers an originUrl key that is present but None/empty,
        # which `dict.get(key, default)` would pass through and crash on.
        request_url = task.get('originUrl') or self.default_origin_url
        # Compute the timestamp once so %(time)s and %(timestamp)s agree.
        now_ms = int(time.time() * 1000)

        replacements = (
            ('%(keyword)s', seed_val),
            ('%(trackingNo)s', seed_val),
            ('%(seed_val)s', seed_val),
            ('%(time)s', now_ms),
            ('%(timestamp)s', now_ms),
        )

        # str.replace without a count already replaces every occurrence.
        for token, value in replacements:
            request_url = request_url.replace(token, str(value))

        return request_url

    def _before_task(self, task):
        """Hook run before request generation; override as needed."""
        return None

    def _get_query_seed_val(self, seed_val):
        """Hook to transform the seed value (tracking number)."""
        return seed_val

    def _get_request_header(self, task):
        """Return the headers for *task*'s request."""
        return self.default_origin_header

    def _get_request_formdata(self, task):
        """Return form data for FormRequest-type requests."""
        return None

    def _get_request_body(self, task):
        """Return the raw body for plain Request-type requests."""
        return None

    def _get_request_cookie(self, task):
        """Return the cookies for *task*'s request."""
        return self.default_origin_cookie

    def _get_request_meta(self, task):
        """Return extra meta entries merged over the basic meta."""
        return {}

    def start_task(self, task):
        """Generate and yield the scrapy Request(s) for *task*.

        Collects URL, headers, body/formdata, cookies and meta from the
        ``_get_request_*`` hooks, then yields one Request per URL.  A
        monitoring record is emitted via :meth:`gen_monitor` afterwards.
        """
        self.logger.info('Start Gen Request')
        self._before_task(task)

        request_urls = self._get_request_url(task)
        request_formdata = self._get_request_formdata(task)
        request_body = self._get_request_body(task)
        request_header = self._get_request_header(task)
        request_cookie = self._get_request_cookie(task)
        request_meta = self._get_request_meta(task)

        # _get_request_url may produce a list of URLs; normalise a single
        # URL string into a one-element list.
        if isinstance(request_urls, str):
            request_urls = [request_urls]

        for request_url in request_urls:
            basic_meta = {
                'task': task,
                'switch_ip': True,
                'request_url': request_url,
                'request_body': request_body,
                'proxy_type': self.default_proxy_type,
                'proxy_demote': self.default_proxy_demote,
                'cookiejar': self.default_origin_cookiejar,
            }
            # Hook-provided meta wins over the basic entries.
            basic_meta.update(request_meta)

            if self.default_origin_request_type == 'FormRequest':
                yield FormRequest(
                    request_url,
                    meta=basic_meta,
                    formdata=request_formdata,
                    headers=request_header,
                    cookies=request_cookie,
                    method=self.default_origin_method,
                    encoding=self.default_origin_encoding,
                    callback=self.parse,
                    dont_filter=True
                )
            else:
                yield Request(
                    request_url,
                    meta=basic_meta,
                    body=request_body,
                    headers=request_header,
                    cookies=request_cookie,
                    method=self.default_origin_method,
                    encoding=self.default_origin_encoding,
                    callback=self.parse,
                    dont_filter=True
                )
        self.logger.info('Gen Request : {}'.format(task))
        self.gen_monitor(task)

    def gen_monitor(self, task):
        """Emit a monitoring log record for the generated task."""
        monitor_logger.info({
            **task,
            'service': self.name,
            'logLevel': 'INFO',
            'crawlerStep': 'start_task',
            'status': 200,
            'message': '',
            'other': '',
        })


class BaseSpider(RedisSpider, BaseMixin):
    """Redis-driven spider combined with BaseMixin's request-generation hooks."""


class BaseCrawlSpider(RedisCrawlSpider, BaseMixin):
    """Redis-driven crawl spider combined with BaseMixin's request-generation hooks."""
