# -*- coding: utf-8 -*-
# Import the random module (used to pick random entries from the pools below).
import random
import random

# Import the middlewares documented in the official Scrapy docs.
# (The old scrapy.contrib path below is deprecated in favour of
# scrapy.downloadermiddlewares, hence it is commented out.)
# from scrapy.contrib.downloadermiddleware.useragent import UserAgentMiddleware
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware

from .settings import IPPOOL
# Import UPPOOL (the User-Agent pool) from the settings file.
from .settings import UPPOOL


class Uamid(UserAgentMiddleware):
    """Downloader middleware that attaches a random User-Agent to each request.

    The User-Agent string is drawn from the UPPOOL list defined in the
    project's settings module.
    """

    def __init__(self, user_agent=''):
        # NOTE: the parent UserAgentMiddleware.__init__ already stores the
        # value as self.user_agent, so the original duplicate assignment
        # (self.user_agent = user_agent) was redundant and has been removed.
        super().__init__(user_agent)

    def process_request(self, request, spider):
        """Pick a random User-Agent from UPPOOL and set it on the request."""
        thisua = random.choice(UPPOOL)
        print("当前使用User-Agent是：" + thisua)
        # setdefault only sets the header when it is not already present,
        # so an explicitly set User-Agent on the request is preserved.
        request.headers.setdefault('User-Agent', thisua)


class IPPOOlS(HttpProxyMiddleware):
    """Downloader middleware that routes each request through a random proxy.

    Proxies are drawn from the IPPOOL list defined in the project's settings
    module; each entry is expected to be a dict with an "ipaddr" key.
    """

    def __init__(self, ip=''):
        # Store the optional ip argument; process_request ignores it and
        # draws from IPPOOL instead.
        super().__init__()
        self.ip = ip

    def process_request(self, request, spider):
        """Choose a random proxy from IPPOOL and attach it to request.meta."""
        chosen = random.choice(IPPOOL)
        addr = chosen["ipaddr"]
        print("当前使用IP是：" + addr)
        request.meta["proxy"] = "http://" + addr


