from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
    ConnectionRefusedError, ConnectionDone, ConnectError, \
    ConnectionLost, TCPTimedOutError
import logging
from scrapy.http import HtmlResponse
from twisted.web.client import ResponseFailed
from scrapy.core.downloader.handlers.http11 import TunnelError
import random
from scrapy.utils.project import get_project_settings
from scrapy.http import HtmlResponse
import json

logger = logging.getLogger(__name__)
settings = get_project_settings()

class ProxyMiddleware(object):
    """Appends a marker string to every response body.

    NOTE(review): despite its name this middleware does no proxying — it
    only rewrites the response body. Consider renaming or removing it.
    """

    def process_response(self, request, response, spider):
        """Return a new HtmlResponse whose body is the original body with
        ' i have edit' appended (UTF-8 encoded)."""
        # Removed leftover debug statement: print("456")
        return HtmlResponse(response.url,
                            body=response.body + ' i have edit'.encode('utf-8'),
                            encoding='utf-8',
                            request=request)

class UAMiddleware(object):
    """Pick a random User-Agent from settings['USER_AGENT_LIST'] per request."""

    def process_request(self, request, spider):
        """Set a randomly chosen 'User-Agent' header on the outgoing request.

        If USER_AGENT_LIST is missing or empty, an error is logged and the
        request is left unchanged (the original code logged and then crashed
        with IndexError in random.choice).
        """
        ua_list = settings['USER_AGENT_LIST']
        if not ua_list:  # covers both None/missing and empty list
            logger.error("you need to provide user agent list in setting")
            return
        request.headers['User-Agent'] = random.choice(ua_list)


class ProcessAllExceptionMiddleware(object):
    """Exception-handling middleware.

    * 4xx/5xx responses are replaced by an HtmlResponse with url="".
    * Exceptions listed in ALL_EXCEPTIONS are converted into an
      HtmlResponse with url="exception".

    Errors are logged here; the sentinel URLs let spider code detect the
    failure and decide on its own follow-up handling.
    """

    # Network/transport failures that are converted into a sentinel response.
    ALL_EXCEPTIONS = (defer.TimeoutError, TimeoutError, DNSLookupError,
                      ConnectionRefusedError, ConnectionDone, ConnectError,
                      ConnectionLost, TCPTimedOutError, ResponseFailed,
                      IOError, TunnelError)

    def process_response(self, request, response, spider):
        """Replace 4xx/5xx responses with an empty-URL sentinel response."""
        # Numeric range check instead of matching string prefixes '4'/'5'.
        if 400 <= response.status < 600:
            logger.error('Get Error status %s: %s', response.status, response.url)
            return HtmlResponse(url='')
        return response

    def process_exception(self, request, exception, spider):
        """Convert known network exceptions into a sentinel response.

        Returns None for unrecognized exceptions so scrapy's default
        exception propagation still applies.
        """
        if isinstance(exception, self.ALL_EXCEPTIONS):
            logger.error('Got exception: %s', exception)
            return HtmlResponse(url='exception')
        # Was a bare print(); route through the module logger instead.
        logger.warning('not contained exception: %s', exception)

class LoadCookiesMiddleware(object):
    """Load cookies once from the JSON file at settings['COOKIE_PATH'] and
    attach them (plus the CSRF token) to every outgoing request."""

    def __init__(self):
        logger.info("load cookie from path: %s", settings["COOKIE_PATH"])
        # Cookie file is a JSON list of objects with "name"/"value" keys
        # (browser cookie-export format) — TODO confirm against the exporter.
        self.cookie_dict = {}
        with open(settings["COOKIE_PATH"], 'r', encoding='utf-8') as f:
            # json.load(f) instead of json.loads(f.read()) — same result,
            # no intermediate string.
            for cookie in json.load(f):
                self.cookie_dict[cookie['name']] = cookie['value']
        # The CSRF token is carried in the 'ct0' cookie; a KeyError here
        # means the cookie export is incomplete.
        self.x_csrf_token = self.cookie_dict['ct0']

    def process_request(self, request, spider):
        """Attach the loaded cookie jar and the x-csrf-token header."""
        request.cookies = self.cookie_dict
        request.headers['x-csrf-token'] = self.x_csrf_token
        return None

    def process_response(self, request, response, spider):
        # Pass-through; present so the middleware chain stays explicit.
        return response