# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
import codecs
import datetime
import logging

from urllib.parse import urlparse

from pyquery import PyQuery as pq
import requests
from scrapy import signals
from scrapy.utils.response import response_status_message

class HttpErrorMiddleware(object):
    """Downloader/spider middleware that retries failed requests through a
    fresh proxy drawn from a local proxy-pool service.

    The pool is expected at ``http://localhost:<port>/random`` and to answer
    with a plain ``host:port`` string.
    """

    logger = logging.getLogger(__name__)

    def get_proxy(self, port=5000):
        """Fetch one proxy from the local pool listening on *port*.

        BUG FIX: *port* now defaults to 5000 — ``process_exception`` used to
        call ``get_proxy()`` with no argument, which raised TypeError.

        Returns a ``http://host:port`` URL string, or ``None`` when the pool
        answered with an error page instead of a proxy.
        """
        url = "http://localhost:{port}/random".format(port=port)
        proxy = requests.get(url).text
        doc = pq(proxy)
        # NOTE(review): a '.debugger' element presumably means the pool
        # returned a Werkzeug debugger/error page rather than a proxy
        # address -- confirm against the pool service.
        if doc('.debugger'):
            proxy = None
        else:
            proxy = 'http://' + str(proxy)
        self.logger.debug("using proxy: %s", proxy)
        return proxy

    def process_spider_exception(self, response, exception, spider):
        """Spider-middleware hook; just logs the exception.

        BUG FIX: previously declared as ``(request, response, exception,
        spider)`` — Scrapy calls this hook with ``(response, exception,
        spider)``, so the extra parameter made every call raise TypeError.
        Returning ``None`` lets other middlewares keep processing.
        """
        self.logger.debug('get exception: %r', exception)
        return None

    def process_response(self, request, response, spider):
        """Retry any 4xx/5xx response through a freshly fetched proxy."""
        # BUG FIX: was ``> 400``, which silently let status 400 itself
        # through; ``>= 400`` matches CustomFaillogMiddleware's threshold.
        if response.status >= 400:
            self.logger.debug("bad status %s, retrying via proxy", response.status)
            request.meta['proxy'] = self.get_proxy(5000)
            # Bypass the dupe filter so the retried request is not dropped.
            request.dont_filter = True
            return request
        return response

    def process_exception(self, request, exception, spider):
        """Retry a request that failed with a download exception.

        BUG FIX: the signature previously included an extra ``response``
        parameter — Scrapy invokes ``process_exception(request, exception,
        spider)`` — and called ``get_proxy()`` without its required port.
        """
        self.logger.debug("caught download error: %r", exception)
        proxy = self.get_proxy(5000)
        request.meta['proxy'] = proxy
        request.dont_filter = True
        self.logger.debug("using proxy: %s", proxy)
        return request


# Failure-logging middleware: records HTTP errors and download exceptions to faillog.log
class CustomFaillogMiddleware(object):
    """Downloader middleware that appends every HTTP error response and
    download exception to ``faillog.log`` as a timestamped line."""

    @classmethod
    def from_crawler(cls, crawler):
        # No settings are read from the crawler; plain construction suffices.
        return cls()

    def process_response(self, request, response, spider):
        """Log any 4xx/5xx response, then pass it through unchanged."""
        if response.status >= 400:
            reason = response_status_message(response.status)
            self._faillog(request, u'HTTPERROR', reason, spider)
        return response

    def process_exception(self, request, exception, spider):
        """Log a download exception and return ``None`` so other exception
        handlers still run.

        BUG FIX: the signature previously had an extra ``response``
        parameter; Scrapy calls ``process_exception(request, exception,
        spider)``, so the hook raised TypeError instead of logging.
        """
        self._faillog(request, u'EXCEPTION', exception, spider)
        return None

    def _faillog(self, request, errorType, reason, spider):
        # Append one timestamped line per failure; utf-8 so non-ASCII
        # reasons are written safely.
        with codecs.open('faillog.log', 'a', encoding='utf-8') as file:
            file.write("%(now)s [%(error)s] %(url)s reason: %(reason)s \r\n" %
                       {'now': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                        'error': errorType,
                        'url': request.url,
                        'reason': reason})