# Downloader middleware that sets a random User-Agent on each request
from crawlspiderdemo.settings import USER_AGENTS_LIST
import random


class UserAgentMiddleware(object):
    """Downloader middleware that attaches a randomly chosen User-Agent."""

    def process_request(self, request, spider):
        """Set a random UA from USER_AGENTS_LIST on the outgoing request.

        :param request: the request object about to be downloaded
        :param spider: the spider that issued the request
        :return: None — Scrapy continues processing and hands the
            request to the downloader.
        """
        chosen_ua = random.choice(USER_AGENTS_LIST)
        request.headers["User-Agent"] = chosen_ua


class CheckUA(object):
    """Middleware that logs the User-Agent actually sent with each request."""

    def process_response(self, request, response, spider):
        # Print the UA header that went out, then pass the response
        # through unchanged.
        print(request.headers["User-Agent"])
        return response


# # 免费代理IP
# class ProxyMiddleware(object):
#
#     def process_request(self, request, spider):
#         """
#         代理ip的使用:
#             代理添加的位置：request.meta中增加proxy字段
#             获取一个代理ip，赋值给request.meta['proxy']
#                 代理池中随机选择代理ip
#                 代理ip的webapi发送请求获取一个代理ip
#         :param request:
#         :param spider:
#         :return:
#         """
#         # 准备好代理ip
#         proxy = "https://116.196.85.150:3128"
#         # 设置代理
#         request.meta["proxy"] = proxy
#         # 可以不写return
#         return None

# # 检测代理ip是否可用
# def process_response(self, request, response, spider):
#     if response.status != 200:  # NOTE: response.status is an int, not the string '200'
#         request.dont_filter = True  # 重新发送的请求对象能够再次进入队列
#         return request


# 收费代理IP
# import base64
#
# # 代理隧道验证信息  这个是在那个网站上申请的
# proxyServer = 'http://proxy.abuyun.com:9010' # 收费的代理ip服务器地址，这里是abuyun
# proxyUser = 用户名
# proxyPass = 密码
# proxyAuth = "Basic " + base64.b64encode(proxyUser + ":" + proxyPass)
#
# class ProxyMiddleware(object):
#     def process_request(self, request, spider):
#         # 设置代理
#         request.meta["proxy"] = proxyServer
#         # 设置认证
#         request.headers["Proxy-Authorization"] = proxyAuth


# 中间件中使用selenium
from selenium import webdriver
import time


def get_cookies(username="ZuoAndroid", password="lyp82nlf@..",
                login_url="https://github.com/login"):
    """Log in to GitHub with a headless Chrome and return the session cookies.

    :param username: GitHub account name (defaults kept for backward
        compatibility).
    :param password: GitHub account password.
    :param login_url: URL of the login form.
    :return: dict mapping cookie name -> cookie value for the logged-in
        session.
    """
    # NOTE(security): credentials are hard-coded as defaults only to stay
    # backward compatible — move them to settings or environment variables.
    options = webdriver.ChromeOptions()
    options.add_argument("--headless")
    options.add_argument("--disable-gpu")
    driver = webdriver.Chrome(executable_path="./chromedriver", chrome_options=options)
    try:
        driver.get(login_url)
        time.sleep(3)  # crude wait for the login page to render
        driver.find_element_by_xpath("//input[@id='login_field']").send_keys(username)
        driver.find_element_by_xpath("//input[@id='password']").send_keys(password)
        driver.find_element_by_xpath("//input[@class='btn btn-primary btn-block']").click()
        time.sleep(3)  # crude wait for the post-login redirect
        raw_cookies = driver.get_cookies()
        return {cookie["name"]: cookie["value"] for cookie in raw_cookies}
    finally:
        # Always close the browser, even if login or element lookup fails —
        # otherwise each failed call leaks a headless Chrome process.
        driver.quit()


class SeleniumMiddlewares(object):
    """Downloader middleware that logs in via Selenium and injects cookies."""

    def process_request(self, request, spider):
        # Log in through a real (headless) browser and copy the resulting
        # session cookies onto the outgoing request.
        # TODO: only do this for the start URL (e.g. when
        # request.url == spider.start_urls[0]) and reuse the cookies —
        # a full browser login per request is very slow.
        session_cookies = get_cookies()
        print(session_cookies)
        # Replace the request's cookies with the logged-in session's.
        request.cookies = session_cookies
