import codecs
from scrapy.exceptions import CloseSpider, NotConfigured
from .expire import Accounts, exp_backoff_full_jitter
import logging
from scrapy import signals
from twisted.internet import task
from functools import partial
from scrapy.utils.misc import load_object

logger = logging.getLogger(__name__)


class RotatingAccountMiddleware(object):
    """Downloader middleware which assigns a random account (and its proxy)
    to each outgoing request and rotates accounts that get banned.

    Account format: ``username:password@proxy``.

    Each request is tagged with ``meta['_rotating_account']``; ban feedback
    is read back from ``meta['_ban']`` (set by BanDetectionMiddleware).
    Banned accounts are put into backoff and periodically reanimated.
    """
    def __init__(self, account_list, logstats_interval, stop_if_no_accounts,
                 max_accounts_to_try, backoff_base, backoff_cap, crawler):
        # Dead accounts come back after an exponential backoff with full
        # jitter: base ``backoff_base`` seconds, capped at ``backoff_cap``.
        backoff = partial(exp_backoff_full_jitter,
                          base=backoff_base,
                          cap=backoff_cap)
        self.accounts = Accounts(account_list,
                                 backoff=backoff)
        self.logstats_interval = logstats_interval
        self.reanimate_interval = 5  # seconds between reanimation sweeps
        self.stop_if_no_accounts = stop_if_no_accounts
        self.max_accounts_to_try = max_accounts_to_try
        self.stats = crawler.stats

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings.

        Accounts are read either from a file given by
        ``ROTATING_ACCOUNT_LIST_PATH`` (one account per line, blank lines
        skipped) or from the ``ROTATING_ACCOUNT_LIST`` setting.
        """
        s = crawler.settings
        account_path = s.get('ROTATING_ACCOUNT_LIST_PATH', None)
        if account_path is not None:
            with codecs.open(account_path, 'r', encoding='utf8') as f:
                account_list = [line.strip() for line in f if line.strip()]
        else:
            account_list = s.getlist('ROTATING_ACCOUNT_LIST')
        mw = cls(
            account_list=account_list,
            logstats_interval=s.getfloat('ROTATING_ACCOUNT_LOGSTATS_INTERVAL',
                                         30),
            stop_if_no_accounts=s.getbool('ROTATING_ACCOUNT_CLOSE_SPIDER',
                                          False),
            max_accounts_to_try=s.getint('ROTATING_ACCOUNT_PAGE_RETRY_TIMES',
                                         5),
            backoff_base=s.getfloat('ROTATING_ACCOUNT_BACKOFF_BASE', 300),
            backoff_cap=s.getfloat('ROTATING_ACCOUNT_BACKOFF_CAP', 3600),
            crawler=crawler,
        )
        crawler.signals.connect(mw.engine_started,
                                signal=signals.engine_started)
        crawler.signals.connect(mw.engine_stopped,
                                signal=signals.engine_stopped)
        return mw

    def engine_started(self):
        """Start the periodic stats-logging and reanimation tasks."""
        self.log_task = task.LoopingCall(self.log_stats)
        self.log_task.start(self.logstats_interval, now=True)
        self.reanimate_task = task.LoopingCall(self.reanimate_accounts)
        self.reanimate_task.start(self.reanimate_interval, now=False)

    def engine_stopped(self):
        """Stop the periodic tasks started in :meth:`engine_started`."""
        if self.log_task.running:
            self.log_task.stop()
        if self.reanimate_task.running:
            self.reanimate_task.stop()

    def reanimate_accounts(self):
        """Move dead accounts whose backoff has expired back into rotation."""
        n_reanimated = self.accounts.reanimate()
        if n_reanimated:
            logger.debug("%s account moved from 'dead' to 'reanimated'",
                         n_reanimated)

    def _handle_result(self, request, spider):
        """Update account state from the ban verdict in ``request.meta``.

        Returns a retry request when the account was banned, else None
        (so process_response falls through to the original response).
        """
        account = request.meta.get('account', None)
        if not (account and request.meta.get('_rotating_account')):
            return
        self.stats.set_value(
            'accounts/unchecked',
            len(self.accounts.unchecked) - len(self.accounts.reanimated))
        self.stats.set_value('accounts/reanimated',
                             len(self.accounts.reanimated))
        self.stats.set_value('accounts/mean_backoff',
                             self.accounts.mean_backoff_time)
        ban = request.meta.get('_ban', None)
        if ban is True:
            self.accounts.mark_dead(account)
            self.stats.set_value('accounts/dead', len(self.accounts.dead))
            return self._retry(request, spider)
        elif ban is False:
            self.accounts.mark_good(account)
            self.stats.set_value('accounts/good', len(self.accounts.good))

    def _retry(self, request, spider):
        """Schedule a retry of ``request`` with a different account.

        This method was referenced by ``_handle_result`` but missing from
        the original class (AttributeError on the first detected ban);
        the implementation follows the scrapy-rotating-proxies retry
        logic, bounded by ``ROTATING_ACCOUNT_PAGE_RETRY_TIMES``.
        """
        retries = request.meta.get('account_retry_times', 0) + 1
        if retries <= self.max_accounts_to_try:
            logger.debug("Retrying %(request)s with another account "
                         "(failed %(retries)d times, max retries: "
                         "%(max_retries)d)",
                         {'request': request, 'retries': retries,
                          'max_retries': self.max_accounts_to_try})
            retryreq = request.copy()
            retryreq.meta['account_retry_times'] = retries
            # Bypass the dupefilter: the URL was already seen.
            retryreq.dont_filter = True
            return retryreq
        logger.debug("Gave up retrying %(request)s (failed %(retries)d "
                     "times with different accounts)",
                     {'request': request, 'retries': retries})

    def process_request(self, request, spider):
        """Attach a random alive account: proxy, download slot and cookie."""
        account = self.accounts.get_random()
        if not account:
            if self.stop_if_no_accounts:
                raise CloseSpider("no_accounts")
            else:
                logger.warning("No accounts available; marking all accounts "
                               "as unchecked")
                self.accounts.reset()
                account = self.accounts.get_random()
                if account is None:
                    logger.error("No accounts available even after a reset.")
                    raise CloseSpider("no_accounts_after_reset")
        request.meta['proxy'] = account.proxy
        request.meta["account"] = account
        # One download slot per account so per-slot delays are per-account.
        request.meta["download_slot"] = self.get_account_slot(account)
        request.meta['_rotating_account'] = True
        # NOTE(review): this replaces any cookies already set on the request
        # with the account's session cookie — confirm that is intended.
        request.cookies = {"sessionid": account.session_id}
        return None

    def process_exception(self, request, exception, spider):
        return self._handle_result(request, spider)

    def process_response(self, request, response, spider):
        return self._handle_result(request, spider) or response

    def get_account_slot(self, account):
        """Return the downloader slot name: one slot per account username."""
        return account.username

    def log_stats(self):
        # Lazy %-args: the Accounts repr is built only if the record is
        # actually emitted.
        logger.info('%s', self.accounts)


class BanDetectionMiddleware(object):
    """
    Downloader middleware for detecting bans. It adds
    '_ban': True to request.meta if the response was a ban.

    To enable it, add it to DOWNLOADER_MIDDLEWARES option::

        DOWNLOADER_MIDDLEWARES = {
            # ...
            'rotating_proxies.middlewares.BanDetectionMiddleware': 620,
            # ...
        }

    By default, client is considered banned if a request failed, and alive
    if a response was received. You can override ban detection method by
    passing a path to a custom BanDetectionPolicy in
    ``ROTATING_PROXY_BAN_POLICY``, e.g.::

    ROTATING_PROXY_BAN_POLICY = 'myproject.policy.MyBanPolicy'

    The policy must be a class with ``response_is_ban``
    and ``exception_is_ban`` methods. These methods can return True
    (ban detected), False (not a ban) or None (unknown). It can be convenient
    to subclass and modify default BanDetectionPolicy::

        # myproject/policy.py
        from rotating_proxies.policy import BanDetectionPolicy

        class MyPolicy(BanDetectionPolicy):
            def response_is_ban(self, request, response):
                # use default rules, but also consider HTTP 200 responses
                # a ban if there is 'captcha' word in response body.
                ban = super(MyPolicy, self).response_is_ban(request, response)
                ban = ban or b'captcha' in response.body
                return ban

            def exception_is_ban(self, request, exception):
                # override method completely: don't take exceptions in account
                return None

    Instead of creating a policy you can also implement ``response_is_ban``
    and ``exception_is_ban`` methods as spider methods, for example::

        class MySpider(scrapy.Spider):
            # ...

            def response_is_ban(self, request, response):
                return b'banned' in response.body

            def exception_is_ban(self, request, exception):
                return None
    """
    def __init__(self, stats, policy):
        # stats: Scrapy StatsCollector used for bans/* counters.
        # policy: object providing response_is_ban / exception_is_ban.
        self.stats = stats
        self.policy = policy

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.stats, cls._load_policy(crawler))

    @classmethod
    def _load_policy(cls, crawler):
        """Instantiate the ban policy named by ROTATING_PROXY_BAN_POLICY,
        honoring an optional ``from_crawler`` alternate constructor."""
        policy_path = crawler.settings.get(
            'ROTATING_PROXY_BAN_POLICY',
            'rotating_accounts.policy.BanDetectionPolicy'
        )
        policy_cls = load_object(policy_path)
        if hasattr(policy_cls, 'from_crawler'):
            return policy_cls.from_crawler(crawler)
        else:
            return policy_cls()

    def process_response(self, request, response, spider):
        """Record the ban verdict for a received response in request.meta.

        A spider-level ``response_is_ban`` method takes precedence over
        the configured policy.
        """
        is_ban = getattr(spider, 'response_is_ban',
                         self.policy.response_is_ban)
        ban = is_ban(request, response)
        request.meta['_ban'] = ban
        if ban:
            self.stats.inc_value("bans/status/%s" % response.status)
            if not response.body:  # idiomatic empty check (was len(...))
                self.stats.inc_value("bans/empty")
        return response

    def process_exception(self, request, exception, spider):
        """Record the ban verdict for a failed download in request.meta.

        A spider-level ``exception_is_ban`` method takes precedence over
        the configured policy. Returns None so other middlewares /
        errbacks still see the exception.
        """
        is_ban = getattr(spider, 'exception_is_ban',
                         self.policy.exception_is_ban)
        ban = is_ban(request, exception)
        if ban:
            ex_class = "%s.%s" % (exception.__class__.__module__,
                                  exception.__class__.__name__)
            self.stats.inc_value("bans/error/%s" % ex_class)
        request.meta['_ban'] = ban