#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2021/7/28 11:45
# @Author  : Samge
import logging
from scrapy import signals
from itkz.resources.db.redis import RedisBase
from itkz.resources.utils import log, u_proxy
from itkz.spider_ext.middlewares.m_base import BaseMiddleware

logger = logging.getLogger(__name__)


class ProxyMiddleware(BaseMiddleware):
    """Downloader middleware that routes requests through dial-up (VPS) proxies.

    The proxy pool lives in a Redis hash named ``proxy`` maintained by a
    dial-up registration service.  For each proxy URL the middleware keeps an
    integer status/counter: ``0`` means "unusable"; a value ``>= 1`` means
    "usable" and doubles as the current connection count, so new requests are
    sent to the usable proxy with the fewest active connections
    (least-connection balancing).

    The in-memory table is also published on the spider as
    ``spider.spider_proxy`` (same dict object) so the retry middleware can
    update connection counts.
    """

    def __init__(self, crawler, host, port, db_index, db_psd):
        """Open the Redis connection and load the initial proxy table.

        :param crawler: the running :class:`scrapy.crawler.Crawler`.
        :param host: Redis host.
        :param port: Redis port.
        :param db_index: Redis database index.
        :param db_psd: Redis password.
        """
        super(ProxyMiddleware, self).__init__(crawler)

        # When proxying is disabled the middleware stays inert: no Redis
        # connection is opened and process_request() exits immediately.
        if not self.is_use_proxy:
            return

        self.is_every_request_random_proxy = crawler.settings.get('IS_EVERY_REQUEST_RANDOM_PROXY')
        self.cliredis = RedisBase(host=host, port=port, db_index=db_index, password=db_psd)
        crawler.signals.connect(self.close_connect, signal=signals.spider_closed)

        # Redis hash values come back as strings; normalize each status /
        # connection count to int.  Result looks like
        # {'http://192.168.2.123': 0, 'http://192.168.2.124': 1}.
        raw_proxy = u_proxy.get_proxy_list(self.cliredis.hgetall('proxy'))
        self.proxy = {key: int(value) for key, value in raw_proxy.items()}

        if not hasattr(crawler.spider, 'spider_proxy'):
            log.log_info(None, '动态添加代理字典')
            # Alias (not copy) the table onto the spider so other middlewares
            # (e.g. retry) see and update the same connection counts.
            crawler.spider.spider_proxy = self.proxy

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings (Scrapy entry point)."""
        return cls(
            crawler,
            host=crawler.settings.get('REDIS_HOST'),
            port=crawler.settings.get('REDIS_PORT'),
            db_index=crawler.settings.get('REDIS_DB'),
            db_psd=crawler.settings.get('REDIS_PASSWORD')
        )

    def close_connect(self):
        """Release the Redis connection when the spider closes."""
        self.cliredis.close_connect()

    def get_vps_by_balance(self, spider):
        """Return the usable proxy with the fewest active connections.

        First synchronizes the in-memory table with the dial-up registration
        centre: proxies the centre dropped are removed, newly registered ones
        are added, and usable/unusable flags are refreshed.

        :param spider: spider used for logging context.
        :return: proxy URL with the lowest connection count.
        :raises ValueError: if the registration centre reports no usable proxy.
        """
        # Fresh snapshot from the registration centre (values are strings
        # straight from Redis, hence the '0' comparison below).
        register_proxy = u_proxy.get_proxy_list(self.cliredis.hgetall('proxy'))

        # Drop proxies the centre no longer advertises — retired VPS boxes
        # must disappear from memory too.
        for key in self.proxy.keys() - register_proxy.keys():
            self.proxy.pop(key)

        buffer_proxy = {}  # usable proxies only: {url: connection count}
        for key, value in register_proxy.items():
            if value == '0':
                # Centre marks this proxy unusable (it may recover later).
                if key not in self.proxy:
                    log.log_info(spider, '拨号服务注册中心新增代理___{}'.format(key))
                self.proxy[key] = 0
            else:
                if key not in self.proxy:
                    self.proxy[key] = 1  # newly registered proxy
                    log.log_info(spider, '拨号服务注册中心新增代理___{}'.format(key))
                if self.proxy[key] == 0:
                    # Was unusable in memory; >= 1 marks it usable again.
                    self.proxy[key] = 1
                    log.log_info(spider, '代理___{}状态变为可用'.format(key))
                buffer_proxy[key] = self.proxy[key]

        # min() over an empty dict raises a cryptic "min() arg is an empty
        # sequence"; fail with an explicit message instead (same type).
        if not buffer_proxy:
            raise ValueError('no usable proxy reported by the dial-up registration centre')

        connect_min = min(buffer_proxy, key=buffer_proxy.get)
        log.log_info(spider, '获取到代理___{}__其连接数为：{}'.format(connect_min, self.proxy[connect_min]))
        return connect_min

    def _attach_proxy(self, request, spider):
        """Pick a proxy by least-connection balancing, attach it to the
        request's ``meta['proxy']`` and bump its connection count."""
        request.meta['proxy'] = self.get_vps_by_balance(spider)
        spider.spider_proxy[request.meta['proxy']] += 1
        log.log_info(spider,  "使用代理ip：{} _请求__ {}".format(request.meta['proxy'], request.url))

    def process_request(self, request, spider):
        """Assign a proxy to the outgoing request (Scrapy hook).

        Skips proxying when disabled, when the request is flagged with
        ``meta['crack_captcha']``, or when the URL targets the LAN.  If the
        request already carries ``meta['proxy']`` it is reused so one session
        stays on one IP; otherwise a balanced proxy is attached.
        """
        if not self.is_use_proxy:
            return

        # Captcha-cracking requests and local-network URLs go out directly.
        if request.meta.get('crack_captcha') or '192.168.' in request.url:
            log.log_info(spider,  '本次请求在meta中配置了crack_captcha或者为本地链接,不使用代理___{}'.format(request.url))
            return

        if self.is_every_request_random_proxy:
            log.log_info(spider,  '配置为每次请求随机获取代理IP')
            self._attach_proxy(request, spider)
            return

        if request.meta.get('proxy'):
            # Reuse the proxy already on the request; just record the
            # additional connection.
            log.log_info(spider,
                '请求对象中已经有IP代理字段，所以无需再去重新获取代理，可以确保一个会话内IP一致___{}___{}'
                    .format(request.meta['proxy'], request.url))
            spider.spider_proxy[request.meta['proxy']] += 1
            return
        self._attach_proxy(request, spider)
