#!/usr/bin/env python
# -*- coding:utf8 -*-
import re
import base64
import json
import time
import random
from retrying import retry
from requests import get
from squirrel_core.commons.utils.logger import Logging
from squirrel_core.commons.proxy import Proxy
from squirrel_core.commons.utils.user_agent_const import UserAgent
from squirrel_core.commons.utils.get_config import get_config

__all__ = ["ProxyMiddleware"]


class ProxyMiddleware(object):
    """Scrapy downloader middleware that attaches a rotating proxy to requests.

    Configuration is read from the crawler settings; proxies are obtained
    through the project's ``Proxy`` helper, and the outcome of every request
    (HTTP status or socket error code) is reported back so the pool can track
    proxy health.
    """

    def __init__(self, settings):
        """Initialize from a Scrapy-style settings object (supports ``.get``)."""
        self.logger = Logging()
        self.enable_proxy = settings.get('USE_PROXY', True)
        self.settings = settings
        self.max_retry_times = settings.get('RETRY_TIMES', 20)
        self.proxy_max_use = settings.get("PROXY_MAX_USE")
        self.name_second = settings.get('NAME_SECOND', "spider")
        self.name_first = settings.get('NAME_FIRST', "spider")
        self.job_id = settings.get('JOB_ID')
        self.proxy_q_name = settings.get('PROXY_Q_NAME', "makaka_spider")
        self.proxy_type = settings.get('PROXY_TYPE', 1)
        # Created lazily on the first proxy fetch (see _get_proxy).
        self.proxy_instance = None
        self.logger.info("Scrapy代理中间件初始化成功")

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory hook: build the middleware from the crawler."""
        return cls(crawler.settings)

    def process_request(self, request, spider):
        """Attach a proxy to the outgoing request.

        Skipped when proxying is disabled globally (``USE_PROXY``) or
        per-request via ``request.meta['use_proxy'] = False``. Failures are
        logged and swallowed so the request still proceeds (without a proxy).
        """
        if self.enable_proxy and request.meta.get("use_proxy", True):
            try:
                self.logger.info(f'开始更换代理')
                self._change_proxy(request, self.proxy_max_use)
            except Exception as e:
                self.logger.warning(f'更换代理出错, 请求地址: {request.url}, 原因: {e}')

    def process_response(self, request, response, spider):
        """Report the proxy outcome (HTTP status) and pass the response through."""
        self._update_proxy_state(request, response, spider)
        return response

    def process_exception(self, request, exception, spider):
        """Report a download failure against the proxy that was used.

        Twisted connection errors expose the OS error code as ``osError``
        (assumed here — TODO confirm against the deployed Twisted version);
        anything without that attribute is recorded as 10060 (timeout).
        """
        self.logger.warning(f'请求失败, 地址:{request.url}, body:{request.body}, exception:{exception}')
        if self.proxy_instance:
            try:
                self._set_proxy_state(request, str(int(exception.osError)))
            except Exception:
                self._set_proxy_state(request, "10060")

    def _change_proxy(self, request, proxy_max_use=5):
        """Fetch a proxy (honoring any per-request blacklist) and install it.

        Sets ``request.meta['proxy']`` and, when the proxy string carries
        ``user:pass@`` credentials, a ``Proxy-Authorization`` basic-auth header.
        """
        proxy_change = request.meta.get('proxy_change', False)
        proxy_str = request.meta.get('proxy_black', '')
        if proxy_str:
            # FIX: raw pattern with escaped dots. The original '(\d+.\d+...)'
            # let '.' match any character, making the IP parse looser than
            # intended (and '\d' in a non-raw literal is a deprecated escape).
            proxy_ip, proxy_port = re.findall(
                r'(\d+\.\d+\.\d+\.\d+):(\d+)',
                proxy_str.replace('http://', '').replace('https://', ''))[0]
            proxy_black_list = [{"ip": proxy_ip, "port": proxy_port}]
        else:
            proxy_black_list = []

        proxy = self._get_proxy(self.name_first, self.name_second, proxy_max_use, proxy_change, proxy_black_list)
        if '@' in proxy:
            # "user:pass@host:port" -> basic-auth header + bare endpoint.
            proxy_list = proxy.split('@')
            proxy = proxy_list[-1]
            proxy_pre = proxy_list[0]
            proxy_user_pass = '@'.join(proxy_list[:-1])
            # FIX: b64encode, not encodebytes — encodebytes appends a trailing
            # newline (and wraps every 76 chars), producing an invalid
            # Proxy-Authorization header value.
            encoded_user_pass = base64.b64encode(proxy_user_pass.encode())
            request.headers['Proxy-Authorization'] = b'Basic ' + encoded_user_pass
            request.meta['proxy_pre'] = proxy_pre
        request.meta['proxy'] = f"http://{proxy}"
        # FIX: use .get() — subscripting a missing header on Scrapy's Headers
        # raises KeyError, which would abort here (after the proxy was set)
        # and skip the User-Agent fallback entirely.
        if not request.headers.get('User-Agent'):
            useragent = random.choice(UserAgent.CUS_USER_AGENTS)
            self.logger.info(f'爬虫未配置User-Agent, 使用内置: {useragent}')
            request.headers['User-Agent'] = useragent
        self.logger.info(f'更换代理成功: {proxy},请求网址: {request.url}')

    @retry(wait_fixed=500)
    def _get_proxy(self, name_first, name_second, proxy_max_use=0, proxy_change=False, proxy_black_list=None):
        """Fetch one proxy string from the pool; retried forever every 500 ms.

        An empty pool result or any fetch error sleeps ``tsleep`` seconds and
        re-raises so the ``@retry`` decorator tries again.
        """
        tsleep = 1
        if not self.proxy_instance:
            # Lazy construction so the middleware can be created without a pool.
            self.proxy_instance = Proxy(proxy_max_use)
        try:
            proxy = self.proxy_instance._get_proxy(proxy_change, proxy_black_list)
            if proxy.get('proxy', ''):
                self.logger.info(f'爬虫{name_first}_{name_second}, 获取代理成功: {proxy.get("proxy", "")}')
                return proxy.get('proxy')
            else:
                self.logger.warning(f'{name_first}_{name_second}, 获取代理为空, 休眠{tsleep}秒将重试')
                time.sleep(tsleep)
                raise Exception()
        except Exception as e:
            self.logger.error(f'{name_first}_{name_second}, 获取代理出错: {e}, 休眠{tsleep}秒将重试')
            time.sleep(tsleep)
            raise e

    def _update_proxy_state(self, request, response, spider):
        """Record the response's HTTP status against the proxy used, if any."""
        if self.proxy_instance:
            try:
                self._set_proxy_state(request, response.status)
            except Exception:
                self._set_proxy_state(request, "10060")

    def _set_proxy_state(self, request, status_code):
        """Push a status report for this request's proxy back to the pool.

        Non-200/521 statuses are logged at error level; the report itself is
        best-effort and never raises out of this method.
        """
        try:
            proxy_pre = request.meta.get("proxy_pre", "")
            if proxy_pre:
                proxy_pre += "@"
            _msg = {
                'proxy': request.meta.get('proxy'),
                'job_id': self.job_id,
                'url': request.url,
                'status_code': status_code,
                'name_first': self.name_first,
                'name_second': self.name_second,
                'do_time': time.time(),
            }
            # 521 is treated as acceptable alongside 200 (both int and str forms).
            if status_code not in [200, '200', '521', 521]:
                self.logger.error(json.dumps(_msg))
            else:
                self.logger.info(json.dumps(_msg))
            proxy = proxy_pre + request.meta.get('proxy', '').replace("http://", "")
            try:
                self.proxy_instance.set_state(status_code, proxy)
            # FIX: narrowed from a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt.
            except Exception:
                self.proxy_instance.set_state(10060, proxy)
        except Exception as e:
            self.logger.error(f"设置代理状态码出错: {e}")