# -*- coding: utf-8 -*-

import random
from scrapy.downloadermiddlewares.retry import RetryMiddleware, response_status_message
import logging
import base64
from urllib import parse
import json

logger = logging.getLogger(__name__)


class UserAgentMiddleware(object):
    """Assign a randomly chosen User-Agent from the spider settings to each request."""

    def process_request(self, request, spider):
        # Rotate the UA on every request; the pool lives in settings['USER_AGENTS'].
        ua_pool = spider.settings['USER_AGENTS']
        request.headers['User-Agent'] = random.choice(ua_pool)


class CustomRetryMiddleware(RetryMiddleware):
    """Retry middleware with spider-specific handling for captcha redirects,
    malformed URLs, and expired login cookies.

    Extends Scrapy's ``RetryMiddleware``; the standard retry-on-status and
    retry-on-exception behaviors are kept, with extra per-spider checks.
    """

    def process_request(self, request, spider):
        """Intercept known-bad request URLs before download.

        Returns a retry copy of the request when a problem is detected, or
        ``None`` to let the request proceed unchanged.
        """
        if spider.name == 'fangtianxia':
            if 'captcha' in request.url:
                retries = request.meta.get('retry_times', 0) + 1
                logger.warning(
                    '{}:{} 被重定向到验证码页面，更换代理ip尝试重试，第{}次重试'.format(spider.name, request.url, retries))
                # Recover the original target URL embedded in the captcha URL.
                # request.replace() is the supported way to change the URL
                # (the original code poked the private ``_url`` attribute).
                request = request.replace(url=self.process_retry_url(request.url))
                reason = response_status_message(302)
                return self._retry(request, reason, spider)
            if 'loupan/' in request.url and 'h316' in request.url:
                retries = request.meta.get('retry_times', 0) + 1
                logger.warning(
                    '{}:{} 请求url异常，第{}次重试'.format(spider.name, request.url, retries))
                # Drop the stray 'h316/' path segment and retry the cleaned URL.
                request = request.replace(url=request.url.replace('h316/', ''))
                reason = response_status_message(200)
                return self._retry(request, reason, spider)
        if spider.name == 'beike_cj_v2':
            if 'captcha' in request.url:
                retries = request.meta.get('retry_times', 0) + 1
                logger.warning(
                    '{}:{} 被重定向到验证码页面，更换代理ip尝试重试，第{}次重试'.format(spider.name, request.url, retries))
                request = request.replace(url=self.process_retry_url_v2(request.url))
                # Operator supplies fresh cookies interactively; blocks the crawl.
                spider.cookie_str = input('请重新输入登录Cookies：\r\n')
                reason = response_status_message(302)
                return self._retry(request, reason, spider)

    def process_response(self, request, response, spider):
        """Retry on retryable HTTP codes and on spider-specific soft failures
        (expired login cookies reported inside an otherwise-successful body)."""
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            return self._retry(request, reason, spider) or response
        if spider.name == 'anjuke_building':
            if 'searchCommunity' in request.url or 'buildings' in request.url:
                try:
                    resp = json.loads(response.body.decode())
                    # Logged out when the API reports 未登录 together with a 401
                    # status. (Exceptions narrowed from a bare ``except:``.)
                    needs_login = '未登录' in resp['message'] and resp['status'] == str(401)
                except (ValueError, KeyError, TypeError):
                    # An unparseable/unexpected body is treated as logged-out.
                    needs_login = True
                if needs_login:
                    retries = request.meta.get('retry_times', 0) + 1
                    logger.warning('{} ，登录Cookies失效，第{}次重试'.format(spider.name, retries))
                    spider.cookie_str = input('请重新输入登录Cookies：\r\n')
                    reason = response_status_message(response.status)
                    return self._retry(request, reason, spider) or response
        if spider.name == 'beike_cj_v2':
            if 'login' in request.url and 'redirect' in request.url:
                retries = request.meta.get('retry_times', 0) + 1
                logger.warning('{} ，登录Cookies失效，第{}次重试'.format(spider.name, retries))
                spider.cookie_str = input('请重新输入登录Cookies：\r\n')
                reason = response_status_message(response.status)
                # Retry the page the login URL was redirecting to.
                request = request.replace(url=self.process_redirect_url(request.url))
                return self._retry(request, reason, spider) or response
        if spider.name == 'beike_building':
            if 'yezhu' in request.url:
                try:
                    resp = json.loads(response.body.decode())
                    # 'login' appearing in errmsg means the session has expired.
                    needs_login = 'login' in resp['data']['errmsg']
                except (ValueError, KeyError, TypeError):
                    needs_login = True
                if needs_login:
                    retries = request.meta.get('retry_times', 0) + 1
                    logger.warning('{} ，登录Cookies失效，第{}次重试'.format(spider.name, retries))
                    reason = response_status_message(response.status)
                    spider.cookie_str = input('请重新输入登录Cookies：\r\n')
                    return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """Retry on network-level exceptions, mirroring stock RetryMiddleware."""
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            return self._retry(request, exception, spider)

    @staticmethod
    def process_retry_url(url):
        """Extract the real target URL from a fangtianxia captcha-redirect URL.

        The target is expected in the ``h`` query parameter; when that is
        missing or not a usable http(s) URL, fall back to base64-decoding.
        NOTE(review): when the ``h`` value exists but fails the 'http' check,
        the *h value itself* (not the original URL) is base64-decoded — this
        mirrors the original behavior; confirm it is intended.

        :param url: the captcha-redirect URL.
        :return: the recovered target URL.
        """
        try:
            url = parse.parse_qs(parse.urlparse(url).query).get('h')[0]
            assert 'http' in url
        except (TypeError, IndexError, AssertionError):
            # TypeError: no 'h' parameter; narrowed from a bare ``except:``.
            url = str(base64.b64decode(url), 'utf-8', errors='ignore')
        return url

    @staticmethod
    def process_retry_url_v2(url):
        """Recover the ``location`` query parameter from a (possibly multiply
        percent-encoded) captcha URL.

        :param url: the captcha-redirect URL.
        :return: the decoded redirect target.
        """
        # Unquote repeatedly: the URL may be percent-encoded several times.
        while '%' in url:
            url = parse.unquote(url)
        redirect_url = parse.parse_qs(parse.urlparse(url).query).get('location')[0]
        return redirect_url

    @staticmethod
    def process_redirect_url(url):
        """Extract the target of a login redirect URL.

        :param url: the login URL containing a ``redirect=`` component.
        :return: everything after the first ``redirect=``.
        """
        while '%' in url:
            url = parse.unquote(url)
        redirect_url = url.split('redirect=')[1]
        return redirect_url


class OuterNetProxyMiddleware(object):
    """Pull a proxy IP from a Redis queue and attach it to the outgoing request."""

    def process_request(self, request, spider):
        # Block for up to 30 minutes waiting for an IP on the 'proxy' list.
        _key, proxy_ip = spider.redis_conn.brpop('proxy', timeout=30 * 60)
        # NOTE(review): proxy credentials are hard-coded here — consider
        # moving them into settings/secrets.
        proxies = {
            "http": "http://hifo:hifo1996@{}:65500".format(proxy_ip),
            "https": "https://hifo:hifo1996@{}:65500".format(proxy_ip)
        }
        scheme = 'http' if request.url.startswith("http://") else 'https'
        request.meta['proxy'] = proxies[scheme]


class AnjukeCookiesMiddleware(object):
    """Attach login cookies to requests from the anjuke / anjuke_building spiders."""

    # Fallback login cookies used when the spider has not provided fresh ones.
    # todo 自动化获取登录cookies (fetch login cookies automatically)
    DEFAULT_COOKIE_STR = 'ajkAuthTicket=TT=a7a2b29a64f96604dfa0e97dfc406ccf&TS=1711013353552&PBODY=VRsStR2PKOw_j1eU8IXotVvWzRnQXKdGz70hDY9VspL99fZzO4G95_4mxhmayfPl_1uX2rBVrle882prACriSEhUyu_xt5zUMLNxSHJHrKWlLRZckC47-QrllvA6sslBruDYO8xqxbDEsg6LbDglXtxGPRa4pVY1ccFJE1HItYU&VER=2&CUID=GYynNy7qWZ3PjLt_llgibQj5svAO2gEh'

    def process_request(self, request, spider):
        if spider.name == 'anjuke' or spider.name == 'anjuke_building':
            # Narrowed from a bare ``except:``: only a missing ``cookie_str``
            # attribute should fall back to the hard-coded default.
            cookie_str = getattr(spider, 'cookie_str', self.DEFAULT_COOKIE_STR)
            # Split on the first '=' only — cookie values may contain '='.
            cookie_dict = {i.split("=", 1)[0]: i.split("=", 1)[1] for i in cookie_str.split("; ")}
            request.cookies = cookie_dict


class BeikeCookiesMiddleware(object):
    """Attach login cookies to beike_cj_v2 / beike_building requests.

    City landing pages ('ke.com/city') are exempted from cookie injection.
    """

    # Fallback login cookies used when the spider has not provided fresh ones.
    # A duplicate assignment that was immediately overwritten (it added a
    # trailing 'hip=' cookie but never took effect) was removed — only this
    # value was ever used.
    # todo 自动化获取登录cookies (fetch login cookies automatically)
    DEFAULT_COOKIE_STR = 'login_ucid=2000000076237471; lianjia_token=2.001474a692697a81e105d98fa37292eb10; lianjia_token_secure=2.001474a692697a81e105d98fa37292eb10; security_ticket=Rq/oYpKfVQoTAnTO3fikCkAu2o2+JieQIjIQoh6RNnzvaeGWuYKWTB2zYWYUA0rJuJCDUv6DFIjQpVbNPU5sUdyEiZ6czyYMadrRyCm6BBtL0Xsnq4SKCsZtvEKadITHPsZveTcaGHbdNUSuVOClCeojMmC/lrgXWYd4WVcj2Io='

    def process_request(self, request, spider):
        if (spider.name == 'beike_cj_v2' or spider.name == 'beike_building') and 'ke.com/city' not in request.url:
            # Narrowed from a bare ``except:``: only a missing ``cookie_str``
            # attribute should fall back to the hard-coded default.
            cookie_str = getattr(spider, 'cookie_str', self.DEFAULT_COOKIE_STR)
            # Split on the first '=' only — cookie values may contain '='.
            cookie_dict = {i.split("=", 1)[0]: i.split("=", 1)[1] for i in cookie_str.split("; ")}
            request.cookies = cookie_dict


class FangtianxiaCookiesMiddleware(object):
    """Attach a randomly chosen stored login cookie set to fangtianxia requests."""

    def process_request(self, request, spider):
        if spider.name != 'fangtianxia':
            return
        # Cookie sets are harvested elsewhere and kept as JSON blobs in a
        # Redis list named by settings['COOKIES_REDIS_NAME'].
        stored = spider.redis_conn.lrange(spider.settings['COOKIES_REDIS_NAME'], 0, -1)
        request.cookies = json.loads(random.choice(stored))


class TmsfCookiesMiddleware(object):
    """Attach a randomly chosen stored login cookie set to tmsf listing requests."""

    def process_request(self, request, spider):
        if spider.name != 'tmsf':
            return
        # Only secondhand community index pages need login cookies.
        if 'esf/xq_index' not in request.url:
            return
        stored = spider.redis_conn.lrange(spider.settings['COOKIES_REDIS_NAME'], 0, -1)
        request.cookies = json.loads(random.choice(stored))
