# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html

from weibo_spiders.settings import USER_AGENT_LIST
import random
import time
import requests
from scrapy.downloadermiddlewares.retry import RetryMiddleware
import logging
import os
from weibo_spiders.utils.accounts import cookies

# cookie池
# cookie池
class CookiesMiddleware(object):
    """Rotate through the account (cookie) pool, one cookie per request,
    and assign a random User-Agent to every outgoing request."""

    def __init__(self):
        # Round-robin pointer into the cookie pool; starts at -1 so the
        # very first request uses index 0.
        self.count = -1
        self.num = len(cookies)
        logging.log(logging.INFO, "[LOAD %d Cookies]" % self.num)

    def process_request(self, request, spider):
        if self.num == 0:
            # Empty pool: send the request without any cookie.
            logging.log(logging.INFO, "[USE NO Cookie !]")
        else:
            # Advance the round-robin pointer and attach that cookie.
            self.count = (self.count + 1) % self.num
            logging.log(logging.INFO, "USE Cookie: " + str(self.count))
            request.headers.setdefault('Cookie', cookies[self.count])
        # Randomize the User-Agent on every request.
        request.headers['User-Agent'] = random.choice(USER_AGENT_LIST)


class ProxyMiddleware(object):
    """Fetch a proxy from a local proxy-pool service (http://127.0.0.1:5010)
    and attach it to each outgoing request via ``request.meta['proxy']``."""

    def get_proxy(self):
        """Return a proxy address (``host:port``) from the pool, or '' on failure.

        Never raises: an empty pool or an unreachable pool service is
        logged and '' is returned so the request proceeds without a proxy.
        """
        result = ''
        try:
            # timeout keeps a dead pool service from blocking the crawl.
            res_json = requests.get("http://127.0.0.1:5010/get/", timeout=5).json()
            proxy = res_json['proxy']
            if proxy != '':
                result = proxy
            else:
                logging.log(logging.INFO, "【代理池为空！】")
        except Exception:
            logging.log(logging.INFO, "【获取代理失败！】")
        return result

    def process_request(self, request, spider):
        """Set ``request.meta['proxy']`` when the pool returned an address."""
        proxy = self.get_proxy()
        if proxy != '':
            request.meta['proxy'] = 'https://' + proxy
            # BUG FIX: logging.log treats extra positional args as %-format
            # arguments; the original passed the proxy as a bare extra arg
            # with no placeholder, which made the logging machinery raise a
            # formatting error and the proxy never reached the log output.
            logging.log(logging.INFO, 'UseProxy: %s', request.meta['proxy'])
        else:
            logging.log(logging.INFO, '[Use No Proxy!]')


class MyRetryMiddleware(RetryMiddleware):
    """Retry middleware that also evicts the failing proxy from the local
    proxy-pool service before re-issuing the request."""

    def delete_proxy(self, proxy):
        """Remove *proxy* (as stored in ``request.meta['proxy']``, i.e. with
        scheme prefix) from the pool. A falsy *proxy* is a no-op."""
        if proxy:
            # Strip the scheme generically instead of the original hard-coded
            # proxy[8:], which silently mangled non-'https://' proxies.
            addr = proxy.split('://', 1)[-1]
            # BUG FIX: logging.log %-formats extra positional args into the
            # message; the original had no placeholder, so formatting failed
            # inside logging and the address was never logged.
            logging.log(logging.INFO, '【删除代理】%s', addr)
            requests.get(
                "http://127.0.0.1:5010/delete/?proxy={}".format(addr),
                timeout=5)

    def process_response(self, request, response, spider):
        """Retry on any non-200 response, dropping the proxy that produced it."""
        if response.status != 200:
            reason = response.status
            # Evict the proxy that returned the bad status.
            self.delete_proxy(request.meta.get('proxy', False))
            # NOTE(review): range(1, 2) contains only 1, so this always
            # sleeps exactly 1s — widen the range if real jitter is wanted.
            time.sleep(random.choice(range(1, 2)))
            # Clear the proxy so the retried request gets a fresh one.
            request.meta['proxy'] = ''
            logging.log(logging.INFO, '[{}] 返回值异常, 进行重试...'.format(reason))
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """On a retryable connection error, drop the proxy and retry."""
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            # Evict the proxy that caused the connection failure.
            self.delete_proxy(request.meta.get('proxy', False))
            time.sleep(random.choice(range(1, 2)))
            logging.log(logging.INFO, '连接异常, 进行重试...')
            return self._retry(request, exception, spider)
