# -*- coding: utf-8 -*-

__author__ = 'bitfeng'
# Define your download middleware here

import random
import logging
import copy
import json
import datetime
from scrapy import signals
from urllib import unquote


from twisted.internet import defer
from twisted.internet.error import TimeoutError, DNSLookupError, \
        ConnectionRefusedError, ConnectionDone, ConnectError, \
        ConnectionLost, TCPTimedOutError

from scrapy.exceptions import NotConfigured
from scrapy.utils.response import response_status_message
from scrapy.xlib.tx import ResponseFailed
import redis
import base64
from test1.tornado_fetcher import Fetcher

logger = logging.getLogger(__name__)


class ReplaceRequestToPhantomjsProxy(object):
    """Downloader middleware that rewrites outgoing requests into POSTs
    against a phantomjs JS-execution proxy endpoint (``JS_EXECUTE_PROXY``).

    The original request's options are serialized as a JSON body that the
    proxy service replays through phantomjs.
    """

    @classmethod
    def from_crawler(cls, crawler):
        midd = cls(crawler.settings)
        # crawler.signals.connect(midd.close_spider, signals.spider_closed)
        crawler.signals.connect(midd.open_spider, signals.spider_opened)
        return midd

    def __init__(self, settings):
        # Defaults mirror scrapy.Request's keyword arguments; note there is
        # deliberately no 'url' key here -- the url comes from the request.
        self.default_options = {
            'callback': None,
            'method': 'GET',
            'headers': None,
            'body': None,
            'cookies': None,
            'meta': None,
            'encoding': 'utf-8',
            'priority': 0,
            'dont_filter': False,
            'errback': None
        }
        self.js_execute_proxy_default = settings.get('JS_EXECUTE_PROXY')

    def open_spider(self, spider):
        # A spider may override the proxy endpoint with a
        # `js_execute_proxy` attribute; otherwise the setting is used.
        self.js_execute_proxy = getattr(spider, 'js_execute_proxy', self.js_execute_proxy_default)

    @staticmethod
    def parse_option(default_options, url, request, **kwargs):
        """Build the option dict POSTed to the phantomjs proxy.

        :param default_options: fallback values per option name.
        :param url: the original request url, used when `request` supplies none.
        :param request: a dict of overrides, or any object whose attributes
            carry the option values (e.g. a scrapy Request, which has no
            ``.get`` method -- attribute lookup is the fallback).
        :param kwargs: optional ``js_script``/``js_run_at``/``load_images``.
        :return: dict ready for ``json.dumps``.
        """
        if hasattr(request, 'get'):
            _get = request.get
        else:
            _get = lambda key, default=None: getattr(request, key, default)
        # BUG FIX: the original iterated over 'url' as well, but
        # default_options has no 'url' key, so default_options['url'] raised
        # KeyError on every call; the (previously unused) `url` argument is
        # the correct default.
        fetch = {'url': _get('url', url)}
        for x in ['method', 'headers', 'body', 'cookies', 'meta',
                  'encoding', 'priority', 'dont_filter', 'callback', 'errback']:
            fetch[x] = _get(x, default_options[x])
        js_script = kwargs.get('js_script')
        if js_script:
            fetch['js_script'] = js_script
            fetch['js_run_at'] = kwargs.get('js_run_at', 'document-end')
        fetch['load_images'] = kwargs.get('load_images', False)
        return fetch

    def process_request(self, request, spider):
        """Redirect the request to the phantomjs proxy, moving the original
        target into a JSON POST body. Requests already aimed at the proxy
        pass through untouched (avoids rewriting our own rewrites)."""
        if request.url == self.js_execute_proxy:
            return
        fetch = self.parse_option(
            default_options=self.default_options,
            url=request.url,
            request=request
        )
        # _set_url/_set_body are the scrapy internals for in-place mutation.
        request._set_url(self.js_execute_proxy)
        request.method = 'POST'
        request.headers["Content-Type"] = "application/x-www-form-urlencoded"
        request._set_body(json.dumps(fetch))
        # Was bare `print` debugging; use the module logger instead.
        logger.debug('phantomjs rewrite headers=%s body=%s', request.headers, request.body)
        return


class RandomUserAgent(object):
    """Downloader middleware that attaches a random desktop User-Agent.

    Only the 'PC' pool is used by process_request; the 'mobile' pool is
    defined but currently unused.
    """

    # User-Agent string pools keyed by device class.
    userAgents = {
        'PC': [
            # Safari 5.1 - Mac
            'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            # Safari 5.1 - Windows
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            # IE 9.0
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;',
            # IE 8.0
            'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
            # IE 7.0
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',
            # IE 6.0
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',
            # Firefox 4.0.1 - Mac
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
            # Firefox 4.0.1 - Windows
            'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
            # Opera 11.11 - Mac
            'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11',
            # Opera 11.11 - Windows
            'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',
            # Chrome 17.0 - Mac
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
            # Maxthon
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',
            # Tencent TT (TencentTraveler)
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
            # The World 2.x
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
            # The World 3.x
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',
            # Sogou Browser 1.x
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)',
            # 360 Browser (360SE)
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
            # Avant
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',
            # Green Browser
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
        ],

        'mobile': [
            # Mobile devices:
            # Safari iOS 4.33 - iPhone
            'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            # Safari iOS 4.33 - iPod Touch
            'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            # Safari iOS 4.33 - iPad
            'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            # Android N1
            'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
            # Android QQ Browser for Android
            'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
            # Android Opera Mobile
            'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10',
            # Android Pad Moto Xoom
            'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13',
            # BlackBerry
            'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+',
            # WebOS HP Touchpad
            'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0',
            # Nokia N97
            'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124',
            # Windows Phone Mango
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)',
            # UC browser (no device prefix)
            'UCWEB7.0.2.37/28/999',
            # UC standard (Nokia)
            'NOKIA5700/ UCWEB7.0.2.37/28/999',
            # UC Openwave
            'Openwave/ UCWEB7.0.2.37/28/999',
            # UC Opera
            'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',
        ]
    }

    def process_request(self, request, spider):
        """Attach a random desktop User-Agent unless one is already set."""
        # Only the 'PC' pool is sampled here.
        user_agent = random.choice(self.userAgents['PC'])
        # random.choice on a non-empty list always returns a truthy string,
        # so this guard is effectively always taken.
        if user_agent:
            # setdefault: an earlier middleware / the request itself wins.
            request.headers.setdefault(b'User-Agent', user_agent)
        return


# class LogHttpErrorStatusUrl(object):
#
#     def process_response(self, request, response, spider):
#         if response.status < 200 or response.status > 300:
#             logging.log(logging.INFO, spider.name+':'+str(response.status)+' | '+response.url)
#         return response


class LogRequestResponseInfo(object):
    """Downloader middleware that logs a one-line summary of every
    request/response pair and passes the response through unchanged."""

    def process_response(self, request, response, spider):
        """Log request/response summaries at INFO and return the response."""
        request_info = {
            'url': request.url,
            'meta': request.meta,
            'Cookie': request.headers.get('Cookie', 'None'),
        }
        response_info = {
            'url': response.url,
            # NOTE(review): request.meta is reused here (as in the original);
            # response.meta is never read -- confirm this is intentional.
            'meta': request.meta,
            'status': response.status,
            'Cookie': response.headers.get('Cookie', 'None')
        }
        # Lazy %-args: the dicts are only stringified when INFO is enabled
        # (the original eagerly concatenated via logging.log(logging.INFO, ...)).
        logging.info('+++request: %s', request_info)
        logging.info('---response: %s', response_info)
        return response


class SetProxy(object):
    """Downloader middleware that assigns each request a proxy popped from a
    redis list and recycles proxies that produced a 200 response, retiring a
    proxy after 100 uses."""

    def __init__(self, settings):
        self.redis_uri = settings.get('REDIS_URI')
        # SECURITY(review): hard-coded credential committed to source;
        # move it into settings or an environment variable.
        passwd = 'Bitfeng,123'
        self.r = redis.StrictRedis(host=self.redis_uri['host'], port=self.redis_uri['port'], db=self.redis_uri['db'],
                                   password=passwd)

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.settings)

    def get_proxy(self):
        """Pop one proxy from the redis list, initialising its usage counter.

        Returns None on any redis failure (logged, never raised)."""
        try:
            proxy = self.r.lpop(self.redis_uri['proxy'])
            if not self.r.hexists(proxy, 'used_times'):
                self.r.hset(proxy, 'used_times', 1)
            return proxy
        except Exception as e:  # was py2-only `except Exception, e`
            logging.info('[Warning]lpop proxy fail %s', e)
            return None

    def schedule_proxy(self, proxy):
        """Bump the proxy's usage counter; requeue it while under 100 uses."""
        try:
            if self.r.hexists(proxy, 'used_times'):
                used_times = int(self.r.hget(proxy, 'used_times'))
                self.r.hset(proxy, 'used_times', used_times + 1)
                if used_times < 100:
                    self.r.lpush(self.redis_uri['proxy'], proxy)
        except Exception as e:  # was py2-only `except Exception, e`
            logging.info('[Warning]schedule proxy fail %s', e)

    def process_request(self, request, spider):
        """Attach a proxy to the request unless meta['dont_proxy'] is set."""
        if request.meta.get('dont_proxy', False):
            return
        proxy = self.get_proxy()
        if proxy:
            # BUG FIX: the original called self.get_proxy() a second time
            # here, consuming two proxies per request and leaking the first
            # one (it was never requeued). Reuse the proxy already popped.
            request.meta['proxy'] = proxy
        return

    def process_response(self, request, response, spider):
        """Recycle the request's proxy when the response succeeded (200)."""
        proxy = request.meta.get('proxy', False)
        if proxy and response.status == 200:
            self.schedule_proxy(proxy)
        return response


class RetryMiddleware(object):
    """Retry middleware (adapted from scrapy's builtin) that additionally
    rotates the request's proxy, popped from redis, on each retry."""

    # IOError is raised by the HttpCompression middleware when trying to
    # decompress an empty response
    EXCEPTIONS_TO_RETRY = (defer.TimeoutError, TimeoutError, DNSLookupError,
                           ConnectionRefusedError, ConnectionDone, ConnectError,
                           ConnectionLost, TCPTimedOutError, ResponseFailed,
                           IOError)

    def __init__(self, settings):
        if not settings.getbool('RETRY_ENABLED'):
            raise NotConfigured
        self.max_retry_times = settings.getint('RETRY_TIMES')
        self.retry_http_codes = set(int(x) for x in settings.getlist('RETRY_HTTP_CODES'))
        self.priority_adjust = settings.getint('RETRY_PRIORITY_ADJUST')

        self.redis_uri = settings.get('REDIS_URI')
        if self.redis_uri:
            self.r = redis.StrictRedis(host=self.redis_uri['host'], port=self.redis_uri['port'], db=self.redis_uri['db'])
        else:
            # Was a py2-only bare `print` (with a typo); self.r stays unset,
            # which get_proxy() tolerates via its broad except.
            logger.warning('REDIS_URI not configured; proxy rotation on retry disabled')

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.settings)

    def process_response(self, request, response, spider):
        """Retry responses whose status is in RETRY_HTTP_CODES."""
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            # _retry returns None once retries are exhausted -> pass response on.
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exception, spider):
        """Retry requests that failed with a known-transient exception."""
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            return self._retry(request, exception, spider)

    def _retry(self, request, reason, spider):
        """Return a retry copy of `request`, or None when retries are exhausted."""
        retries = request.meta.get('retry_times', 0) + 1

        if retries <= self.max_retry_times:
            logger.debug("Retrying %(request)s (failed %(retries)d times %(proxy)s): %(reason)s",
                         {'request': request,
                          'retries': retries,
                          'proxy': ('with proxy-%s' % request.meta['proxy']) if request.meta.get('proxy') else '',
                          'reason': reason},
                         extra={'spider': spider})
            retryreq = request.copy()
            retryreq.meta['retry_times'] = retries
            # Rotate to a fresh proxy when the failed request used one.
            if request.meta.get('proxy'):
                proxy = self.get_proxy()
                if proxy:
                    retryreq.meta['proxy'] = proxy
            retryreq.priority = request.priority + self.priority_adjust
            retryreq.dont_filter = True
            return retryreq
        else:
            logger.debug("Gave up retrying %(request)s (failed %(retries)d times): %(reason)s",
                         {'request': request, 'retries': retries, 'reason': reason},
                         extra={'spider': spider})

    def get_proxy(self):
        """Pop one proxy from redis; None on any failure (incl. unset self.r)."""
        try:
            proxy = self.r.lpop(self.redis_uri['proxy'])
            return proxy
        except Exception as e:  # was py2-only `except Exception, e`
            logging.info('[Warning]lpop proxy fail %s', e)
            return None

# NOTE(review): Scrapy reads COOKIES_ENABLED from the project's settings
# module; defining it here in a middleware module has no effect unless this
# name is imported into settings -- confirm whether it is still needed.
COOKIES_ENABLED = True

# class ProxyMiddleware(object):
#
#     def process_request(self, request, spider):
#
#         request.meta['proxy'] = "us03-60.ssv7.net:30199"
#
#         proxy_user_pass = "us01-60.ssv7.net:d4GhVWDtWx8s"
#
#         encoded_user_pass = base64.encodestring(proxy_user_pass)
#
#         request.headers['Proxy-Authorization'] = 'Basic ' + encoded_user_pass


class ChangeCookie(object):
    """Middleware that maintains a shared `ccpassport` cookie (fetched via
    phantomjs from wenshu.court.gov.cn) and retries requests whose response
    suggests the cookie has expired, refreshing it at most once per minute."""

    def __init__(self, settings):
        # Eager fetch on startup; get_cookie() performs network I/O.
        self.cookie = self.get_cookie()
        self.cookie_refresh_datetime = datetime.datetime.now()
        # Minimum seconds between refreshes -- throttles phantomjs fetches.
        self.cookie_refresh_duration = 60
        self.priority_adjust = settings.getint('RETRY_PRIORITY_ADJUST')

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.settings)

    def process_request(self, request, spider):
        """Attach the shared cookie to phantomjs-flagged requests only."""
        if request.meta.get('phantomjs') == '1':
            request.headers['cookie'] = self.cookie
        return

    def process_response(self, request, response, spider):
        """On a cookie-rejection response, refresh the cookie (rate-limited)
        and return a retry copy of the request; otherwise pass the response."""
        if self.response_exception(response):
            retryreq = request.copy()
            # BUG FIX: use total_seconds() -- .seconds only holds the
            # sub-day remainder of a timedelta and is wrong past 24h.
            elapsed = (datetime.datetime.now() - self.cookie_refresh_datetime).total_seconds()
            if elapsed > self.cookie_refresh_duration:
                self.cookie = self.get_cookie()
                self.cookie_refresh_datetime = datetime.datetime.now()
            # The retry picks up the (possibly new) cookie in process_request.
            retryreq.dont_filter = True
            return retryreq
        return response

    def get_cookie(self):
        """Fetch the site with phantomjs and return the ccpassport cookie as
        a Cookie-header fragment, or None when the fetch fails."""
        fetcher = Fetcher()
        res = fetcher.phantomjs_fetch('http://wenshu.court.gov.cn')
        if res:
            return 'ccpassport=%s' % str(res['cookies']['ccpassport'])
        return None

    def response_exception(self, response):
        """Heuristic: a text/html response means the cookie was rejected.

        Uses .get() so a missing Content-Type header no longer raises
        KeyError. NOTE(review): scrapy header values may be bytes -- confirm
        the str comparison matches on this deployment."""
        if response.headers.get('Content-Type') == 'text/html':
            # Was a py2-only bare `print`.
            logging.info('get_new_cookie')
            return True
        return False



# class ProxyMiddleware(object):
#
#     def process_request(self, request, spider):
#
#         request.meta['proxy'] = "http://210.51.2.203:8089"
#
#         # proxy_user_pass = ":d4GhVWDtWx8s"
#
#         return



#
# class ChangeCookie(object):
#
#     def process_request(self, request, spider):
#         if request.meta.get('phantomjs') == '1':
#             #cookies_select = open('cookie.txt')
#
#             fetcher = Fetcher()
#             res = fetcher.phantomjs_fetch('http://wenshu.court.gov.cn')['cookies']['ccpassport']
#             # f = r'/home/li-x/test1/test1/cookie.txt'
#             #
#             # with open(f,'r') as file:
#             #     cookies_select = file.read()
#             # file.close()
#             # request.headers['cookie'] = 'ccpassport=%s' %str(cookies_select)
#             request.headers['cookie'] = 'ccpassport=%s' %str(res)
#             return