# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals
from scrapy.downloadermiddlewares.retry import RetryMiddleware, response_status_message

import time
import random
import logging
import requests
import telnetlib

logger = logging.getLogger(__name__)


class UserAgentMiddleware(object):
    """Assign a randomly chosen User-Agent header to every outgoing request.

    The candidate agents come from the spider's ``USER_AGENTS`` setting.
    """

    def process_request(self, request, spider):
        pool = spider.settings['USER_AGENTS']
        request.headers['User-Agent'] = random.choice(pool)


class InnerNetProxyMiddleware(object):
    """Attach a random, reachable proxy from the ``PROXY`` setting.

    Each candidate ``"ip:port"`` entry is probed with a short telnet (TCP)
    connection first; unreachable proxies are skipped and another one is
    drawn.  NOTE(review): if every configured proxy is down this loops
    forever — behaviour kept from the original, worth bounding.
    """

    def process_request(self, request, spider):
        PROXY = spider.settings['PROXY']
        while True:
            proxy = random.choice(PROXY)
            proxy_ip, _, proxy_port = proxy.partition(':')
            try:
                # Context manager closes the probe connection; the original
                # leaked the Telnet socket on every successful probe.
                with telnetlib.Telnet(proxy_ip, proxy_port, timeout=3):
                    pass
            except Exception as e:
                # Unreachable proxy: log (instead of print) and draw again.
                logger.warning('proxy %s unreachable: %s', proxy, e)
                continue
            else:
                # Proxy URL scheme follows the request's scheme.
                scheme = 'http://' if request.url.startswith("http://") else 'https://'
                request.meta['proxy'] = scheme + proxy
                break


class OuterNetProxyMiddleware(object):
    """Attach an authenticated outer-net proxy taken from the Redis 'proxy' list.

    Blocks (up to 24 hours) on the Redis list until a proxy IP is pushed,
    then builds an authenticated proxy URL matching the request's scheme.
    NOTE(review): credentials are hard-coded here and in the sibling
    middlewares — they belong in settings.
    """

    def process_request(self, request, spider):
        # brpop blocks until an item is available; returns (key_name, value).
        key_name, proxy_ip = spider.redis_conn.brpop('proxy', timeout=24 * 60 * 60)
        # redis-py returns bytes unless decode_responses=True; normalise so
        # the formatted URL never embeds a b'...' repr.
        if isinstance(proxy_ip, bytes):
            proxy_ip = proxy_ip.decode()
        scheme = 'http' if request.url.startswith('http://') else 'https'
        request.meta['proxy'] = '{0}://hifo:hifo1996@{1}:65500'.format(scheme, proxy_ip)


class SfpmProxyMiddleware(object):
    """Route matching sfpm requests through an authenticated Redis-sourced proxy.

    Only URLs containing one of the known page markers get a proxy; the
    final retry attempt (retry_times > 9) is sent without a proxy so it
    gets one last direct shot.
    """

    def process_request(self, request, spider):
        markers = ('sf-item.taobao.com/sf_item', 'location_code', 'ksTS',
                   'alicdn', 'susong-item.taobao.com')
        if request.meta.get('retry_times', 0) > 9:
            return  # last attempt goes out without a proxy
        if not any(marker in request.url for marker in markers):
            return
        # Block (up to 24 hours) until a proxy IP is available in Redis.
        _, proxy_ip = spider.redis_conn.brpop('proxy', timeout=24 * 60 * 60)
        scheme = 'http' if request.url.startswith('http://') else 'https'
        request.meta['proxy'] = '{0}://hifo:hifo1996@{1}:65500'.format(scheme, proxy_ip)


class SfpmCookiesMiddleware(object):
    """Inject a fixed login cookie set into sfpm 'getSfDealConfirm' requests.

    NOTE(review): the cookie string is a hard-coded session capture and
    will eventually expire; consider moving it to settings or refreshing
    it automatically.
    """

    def process_request(self, request, spider):
        if spider.name == 'sfpm' and 'getSfDealConfirm' in request.url:
            cookie_str = 'miid=859257951161395345; cna=FiDQFb511hMCAbfmLefl5vdT; cookie2=183c36c8c3193506940ab3fc615c42a0; v=0; _tb_token_=f5e611e094464; _samesite_flag_=true; t=834a5413201f952e11e97b22bff80c42; xlly_s=1; uc1=cookie14=Uoe0bUH1E2BAqA%3D%3D; x5sec=7b22676f7661756374696f6e3b32223a223530353763313538383730346365643031326332326633393662343566633535434f6e557366734645492b316964437070732b446777453d227d; tfstk=cCGfB9G3NijjzhvNgqTzbQn9IYPdZaKgzMZqlxgu66mpRmufi21UxVgVEG18pz1..; l=eBQdZSZRQt1a4mzFBOfwourza77tbIRfguPzaNbMiOCPOv568CARWZrKa3YBCnGVHsNeR3lBGc0aBz8wZydwAveONjdiNjMmndC..; isg=BBISzXcHmcES_eQgVN5SZTrsY9j0Ixa9OOsC-NxrJkWa77PpxLDvzTkBX0tTn45V'
            # Split each pair on the FIRST '=' only: values such as
            # 'uc1=cookie14=Uoe0bUH1E2BAqA%3D%3D' contain '=' themselves and
            # were truncated by the original split("=")[1].
            request.cookies = {
                name: value
                for name, _, value in (pair.partition("=") for pair in cookie_str.split("; "))
            }


class DailiyunSfpmProxyMiddleware(object):
    """Proxy selection for the sfpm spider backed by the Dailiyun pool.

    'item_list' requests use an authenticated proxy popped from Redis;
    every other sfpm request asks the local Dailiyun HTTP endpoint for a
    random proxy.  On pool failure the request goes out without a proxy
    after an exponential back-off sleep (NOTE(review): time.sleep blocks
    the whole Scrapy reactor — kept from the original, but worth moving
    to a non-blocking retry).
    """

    def process_request(self, request, spider):
        if spider.name != 'sfpm':
            return
        if 'item_list' in request.url:
            # Block (up to 3 days) until Redis yields a proxy IP.
            _, proxy_ip = spider.redis_conn.brpop('proxy', timeout=3 * 24 * 60 * 60)
            # redis-py returns bytes unless decode_responses=True.
            if isinstance(proxy_ip, bytes):
                proxy_ip = proxy_ip.decode()
            scheme = 'http' if request.url.startswith('http://') else 'https'
            request.meta['proxy'] = '{0}://hifo:hifo1996@{1}:65500'.format(scheme, proxy_ip)
            return
        dailiyun_server = 'http://192.168.5.223:5555/random'
        retries = request.meta.get('retry_times', 0)
        try:
            # Bounded timeout so a dead pool server cannot hang the crawl;
            # the original had no timeout at all.
            resp = requests.get(url=dailiyun_server, timeout=10)
            ok = resp.status_code == 200
        except requests.RequestException:
            # Narrowed from a bare except (which also swallowed the
            # control-flow assert the original relied on).
            ok = False
        if not ok:
            logger.error(f'{dailiyun_server}  代理服务器出错，获取代理失败')
            time.sleep(60 * 2 ** retries)
            return
        proxy_addr = resp.content.decode()
        scheme = 'http' if request.url.startswith('http://') else 'https'
        request.meta['proxy'] = f'{scheme}://{proxy_addr}'


class KuaidailiSfpmProxyMiddleware(object):
    """Proxy selection for the sfpm spider backed by the Kuaidaili pool.

    Mirrors DailiyunSfpmProxyMiddleware with a different pool endpoint:
    'item_list' requests use an authenticated Redis-sourced proxy; other
    sfpm requests fetch a random proxy from the local Kuaidaili endpoint.
    On pool failure the request is sent without a proxy after an
    exponential back-off sleep (NOTE(review): time.sleep blocks the whole
    Scrapy reactor).
    """

    def process_request(self, request, spider):
        if spider.name != 'sfpm':
            return
        if 'item_list' in request.url:
            # Block (up to 3 days) until Redis yields a proxy IP.
            _, proxy_ip = spider.redis_conn.brpop('proxy', timeout=3 * 24 * 60 * 60)
            # redis-py returns bytes unless decode_responses=True.
            if isinstance(proxy_ip, bytes):
                proxy_ip = proxy_ip.decode()
            scheme = 'http' if request.url.startswith('http://') else 'https'
            request.meta['proxy'] = '{0}://hifo:hifo1996@{1}:65500'.format(scheme, proxy_ip)
            return
        kuaidaili_server = 'http://192.168.5.233:5555/random'
        retries = request.meta.get('retry_times', 0)
        try:
            # Bounded timeout so a dead pool server cannot hang the crawl;
            # the original had no timeout at all.
            resp = requests.get(url=kuaidaili_server, timeout=10)
            ok = resp.status_code == 200
        except requests.RequestException:
            # Narrowed from a bare except (which also swallowed the
            # control-flow assert the original relied on).
            ok = False
        if not ok:
            logger.error(f'{kuaidaili_server}  代理服务器出错，获取代理失败')
            time.sleep(60 * 2 ** retries)
            return
        proxy_addr = resp.content.decode()
        scheme = 'http' if request.url.startswith('http://') else 'https'
        request.meta['proxy'] = f'{scheme}://{proxy_addr}'


class CustomRetryMiddleware(RetryMiddleware):
    """RetryMiddleware extended with an anti-bot check for sfpm list pages.

    Besides the standard HTTP-status based retries, 'item_list' responses
    of the sfpm spider are scanned for the 'x5referer' marker that the
    judicial-auction site returns when it demands a login check; such
    responses are retried (picking up a fresh proxy from the proxy
    middlewares on the next attempt).
    """

    def process_response(self, request, response, spider):
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            return self._retry(request, reason, spider) or response
        # 2020/09/24 by ZSQ: the judicial-auction site's anti-crawl upgrade
        # forces a login check when province/city/district list pages are
        # fetched repeatedly from one IP; detect the marker and retry.
        if spider.name == 'sfpm' and 'item_list' in request.url:
            resp_str = str(response.body, encoding='gbk', errors='ignore')
            # Plain membership test instead of the original try/assert/except:
            # asserts are stripped under `python -O`, which silently disabled
            # this retry path.
            if 'x5referer' in resp_str:
                retries = request.meta.get('retry_times', 0) + 1
                logger.warning(
                    '{}:{}  司法拍卖 省市区 数据获取失败，需登录验证，更换代理ip尝试重试，第{}次重试'.format(spider.name, request.url, retries))
                reason = response_status_message(response.status)
                return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        # Retry network-level exceptions Scrapy deems retryable, unless the
        # request opted out via dont_retry.
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            return self._retry(request, exception, spider)
