# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import random
import time
import traceback

import requests
from scrapy import signals
from Banshan.utils.UserAgent import User_Agent_list
from scrapy.http import HtmlResponse

from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
    ConnectionRefusedError, ConnectionDone, ConnectError, \
    ConnectionLost, TCPTimedOutError
from twisted.web.client import ResponseFailed
from scrapy.core.downloader.handlers.http11 import TunnelError
from scrapy.exceptions import IgnoreRequest
from scrapy import signals
from scrapy.utils.project import get_project_settings
import json
import logging
import requests
from twisted.internet.defer import DeferredLock
from Banshan.utils.ProxyModel import ProxyModel

settings = get_project_settings()
logger = logging.getLogger(__name__)



class RandomUserAgentMiddleware:
    """Downloader middleware that attaches browser-like headers to every
    outgoing request, with a user-agent drawn at random from
    ``User_Agent_list`` to reduce the chance of UA-based blocking.
    """

    def __init__(self):
        # Base browser-fingerprint headers shared by all requests.
        # The user-agent placeholder here is overridden per request in
        # process_request(); this dict itself is never mutated after init.
        self.headers = {
            'pragma': 'no-cache',
            'cache-control': 'no-cache',
            'upgrade-insecure-requests': '1',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
            'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
            'sec-fetch-site': 'none',
            'sec-fetch-mode': 'navigate',
            'sec-fetch-user': '?1',
            'sec-fetch-dest': 'document',
            'accept-language': 'zh-CN,zh;q=0.9',
        }

    def process_request(self, request, spider):
        """Stamp *request* with the base headers plus a random user-agent.

        Called by Scrapy for each request passing through the downloader
        middleware. Returns ``None`` so Scrapy continues processing the
        request normally.
        """
        # Build a fresh per-request dict instead of mutating self.headers,
        # so no header state (e.g. a stale user-agent or referer) can leak
        # from one request into another.
        per_request = dict(self.headers)
        per_request['user-agent'] = random.choice(User_Agent_list)
        request.headers.update(per_request)
        return None

