import scrapy

import logging
from scrapy_pyppeteer.request import PyppeteerRequest
import json

logger = logging.getLogger(__name__)


class TmsfCookieSpider(scrapy.Spider):
    """Spider that loads tmsf.com in a Pyppeteer-rendered browser, collects
    the resulting page cookies and stores them (as JSON strings) in a
    capped Redis list for other spiders to reuse.

    Relies on:
      * the Pyppeteer middleware putting the browser cookies into
        ``response.meta['page_cookies']`` (enabled via ``PAGE_COOKIES``);
      * ``RedisConnPipeline`` attaching a redis connection to the spider
        as ``self.redis_conn``.
    """

    name = 'tmsf_cookie'
    allowed_domains = ['tmsf.com']
    start_urls = ['https://www.tmsf.com/']
    # URLs to render; each is requested several times to build a pool of cookies.
    url_list = ['https://www.tmsf.com/', ]

    custom_settings = {
        'CONCURRENT_REQUESTS': 4,
        'DOWNLOADER_MIDDLEWARES': {
            'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
            'HifoEsf.middlewares.CustomRetryMiddleware': 500,
            'HifoEsf.middlewares.UserAgentMiddleware': 544,
            'HifoEsf.middlewares.OuterNetProxyMiddleware': 545,  # use proxies from redis; requires RedisConnPipeline to be enabled too
            'scrapy_pyppeteer.downloadermiddlewares.PyppeteerMiddleware': 566,
        },
        'ITEM_PIPELINES': {
            'HifoEsf.pipelines.RedisConnPipeline': 299,  # provides self.redis_conn; required by XXXProxyMiddleware
        },
        # 'RETRY_HTTP_CODES': [500, 502, 503, 504, 400, 404, 408, 407, 302],
        'GERAPY_PYPPETEER_HEADLESS': True,
        'GERAPY_ENABLE_REQUEST_INTERCEPTION': True,
        'GERAPY_PYPPETEER_DEVTOOLS': False,
        'GERAPY_PYPPETEER_DUMPIO': True,
        'GERAPY_PYPPETEER_DOWNLOAD_TIMEOUT': 20,
        'RETRY_TIMES': 10,
        'COOKIES_ENABLED': True,  # send cookies with requests
        'PAGE_COOKIES': True,  # return the rendered page's cookies in response.meta
        'COOKIES_REDIS_NAME': 'tmsf_cookies',
    }

    def start_requests(self):
        """Yield each URL three times to harvest independent cookie sets.

        ``dont_filter=True`` bypasses the duplicate filter so the repeated
        requests are actually issued.
        """
        for _ in range(3):
            for url in self.url_list:
                yield PyppeteerRequest(
                    url,
                    pretend=True,
                    wait_until='load',
                    ignore_resource_types=['image', 'media'],
                    dont_filter=True,
                )

    def parse(self, response, **kwargs):
        """Serialize the page cookies and push them onto a capped Redis list.

        Keeps only the 4 most recent cookie sets under the key named by the
        ``COOKIES_REDIS_NAME`` setting.
        """
        page_cookies = response.meta.get('page_cookies')
        if not page_cookies:
            # Best-effort: without cookies there is nothing to persist.
            logger.warning('no page_cookies in response meta for %s', response.url)
            return
        page_cookies_str = json.dumps({c['name']: c['value'] for c in page_cookies})
        redis_key = self.settings['COOKIES_REDIS_NAME']
        # transaction=True already wraps the queued commands in MULTI/EXEC at
        # execute() time, so no explicit multi() call is needed.
        redis_pipeline = self.redis_conn.pipeline(transaction=True)
        redis_pipeline.lpush(redis_key, page_cookies_str)
        redis_pipeline.ltrim(redis_key, 0, 3)  # retain the 4 newest entries
        redis_pipeline.execute()
