#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2021/5/10 上午11:51
# @Author  : Samge
import random

from itkz.resources.utils import log
from itkz.spider_ext.datas import data_agent
from itkz.spider_ext.middlewares.m_base import BaseMiddleware
from itkz.spider_ext.middlewares.m_proxy import ProxyMiddleware


class RandomUserAgentMiddleware(BaseMiddleware):
    """Downloader middleware that injects a random User-Agent into the
    headers of Splash-rendered requests.

    Only acts when the middleware is enabled (``self.is_use_agent``) and the
    request actually goes through Splash (``request.meta['splash']['args']``
    present). A User-Agent already set on the request is kept as long as it
    is not Scrapy's default one, so a session keeps a consistent agent.
    """

    def process_request(self, request, spider):
        """Ensure the Splash request carries a non-Scrapy User-Agent header.

        Returns ``None`` in every case so Scrapy continues processing the
        request through the remaining middlewares.
        """
        if not self.is_use_agent:
            return

        # Skip non-Splash requests or Splash requests without args.
        splash_meta = request.meta.get('splash')
        if not splash_meta or not splash_meta.get('args'):
            return

        args = splash_meta['args']
        target_url = args.get('url')

        # No headers yet: seed them with a randomly chosen User-Agent.
        headers = args.get('headers')
        if not headers:
            headers = {'User-Agent': random.choice(data_agent.user_agent_list)}
            args['headers'] = headers

        # A pre-existing, non-Scrapy User-Agent is kept so that one session
        # presents a stable identity.
        current_agent = headers.get('User-Agent')
        if current_agent and 'Scrapy' not in str(current_agent):
            log.log_info(spider, '请求对象中已经有用户代理字段，所以无需再去重新获取，可以确保一个会话内用户代理一致___{}___{}'.format(current_agent, target_url))
            return

        # Missing or Scrapy-default agent: replace it with a random one.
        chosen_agent = random.choice(data_agent.user_agent_list)
        args['headers']['User-Agent'] = chosen_agent
        log.log_info(spider, '获取到的用户代理user-agent： {}  url={}'.format(chosen_agent, target_url))


class SplashProxyMiddleware(ProxyMiddleware):
    """Downloader middleware that assigns a proxy to Splash-rendered requests
    by writing it into ``request.meta['splash']['args']['proxy']``.

    Proxy selection strategy:
      * ``is_every_request_random_proxy`` — pick a fresh proxy for every request;
      * otherwise reuse the proxy already stored in the splash args (keeps one
        IP per session), falling back to a load-balanced pick when absent.
    Requests flagged with ``crack_captcha`` or targeting the local
    ``192.168.`` network bypass the proxy entirely.
    """

    def close_connect(self):
        # Release the redis connection held by the proxy-pool client.
        self.cliredis.close_connect()

    def process_request(self, request, spider):
        """Attach a proxy to the outgoing Splash request when applicable.

        Always returns ``None`` so Scrapy keeps processing the request.
        """
        if not self.is_use_proxy:
            return

        # Skip non-Splash requests or Splash requests without args.
        if not request.meta.get('splash') or not request.meta.get('splash').get('args'):
            return

        splash_args = request.meta['splash']['args']
        url = splash_args.get('url')

        # BUG FIX: `url` may be absent (None); the original unguarded
        # `'192.168.' in url` raised TypeError in that case.
        if splash_args.get('crack_captcha') or (url and '192.168.' in url):
            log.log_info(spider, '本次请求在meta中配置了crack_captcha或者为本地链接,不使用代理___{}'.format(url))
            return

        if self.is_every_request_random_proxy:
            log.log_info(spider, '配置为每次请求随机获取代理IP')
            proxy = self.get_vps_by_balance()
            self.set_proxy(request, spider, proxy)
            log.log_info(spider, "使用代理ip：{} _请求__ {}".format(proxy, url))
            return

        # Reuse a proxy already pinned on this request so one session keeps
        # a consistent IP.
        proxy = splash_args.get('proxy')
        if proxy:
            log.log_info(spider, '请求对象中已经有IP代理字段，所以无需再去重新获取代理，可以确保一个会话内IP一致___{}___{}'.format(proxy, url))
            self.set_proxy(request, spider, proxy)
            return

        proxy = self.get_vps_by_balance()
        self.set_proxy(request, spider, proxy)
        log.log_info(spider, "使用代理ip：{} _请求__ {}".format(proxy, url))

    def set_proxy(self, request, spider, proxy):
        """Write `proxy` into the splash args and bump its connection count.

        # request.meta['splash']['args']['proxy'] = 'http://192.168.3.169:8888' can be
        # used here to verify that proxy configuration is picked up by Splash.
        """
        request.meta['splash']['args']['proxy'] = proxy
        # ROBUSTNESS: default to 0 for proxies not yet tracked instead of
        # raising KeyError on first use.
        spider.spider_proxy[proxy] = spider.spider_proxy.get(proxy, 0) + 1  # connection count +1
