# -*- coding: utf-8 -*-

import datetime
import os
import logging
import time
import config
import redis

from scrapy.crawler import CrawlerRunner
from django.conf import settings
from scrapy.utils import defer, reactor
from scrapy.utils.log import configure_logging
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor, defer
from django.core.management.base import BaseCommand


class RunSpiderCommand(BaseCommand):
    """Django management command that runs a set of Scrapy spiders.

    When ``is_redis_running`` is True, a Redis key ('<name>_running') is used
    as a cross-process lock so that only one instance of the command crawls
    at any given time.
    """
    help = 'run spider'
    name = 'runspider'
    # When True, acquire a Redis lock before crawling; skip silently if held.
    is_redis_running = True

    def __init__(self, *a, **kw):
        super(RunSpiderCommand, self).__init__(*a, **kw)
        # Redis key used as the "already running" mutex for this command.
        self.redis_key_running = '%s_running' % self.name

    def add_arguments(self, parser):
        # Each '-a key=value' becomes one spider argument; may be repeated.
        parser.add_argument('-a', action = 'append', dest = 'spargs', default = [],
                            help = 'set spider argument (may be repeated)')

    # Entry point required by Django's BaseCommand.
    def handle(self, *args, **options):
        os.chdir(settings.BASE_DIR)

        self.spargs = arglist_to_dict(options['spargs'])
        print('spargs:%s time:%s' % (self.spargs, str(datetime.datetime.now())))

        configure_logging(install_root_handler = False)
        # basicConfig fails if the log directory is missing — create it first.
        if not os.path.isdir('log'):
            os.makedirs('log')
        logging.basicConfig(
            filename = 'log/%s_%s.log' % (self.name, time.strftime('%Y-%m-%d', time.localtime())),
            format = '%(levelname)s %(asctime)s: %(message)s',
            level = logging.ERROR
        )

        if self.is_redis_running:
            self.redis_db = redis.StrictRedis(**config.redis_config)
            # SET ... NX is atomic: exactly one process acquires the lock.
            # (get() followed by set() was a check-then-act race — two
            # processes could both see the key absent and both crawl.)
            if self.redis_db.set(self.redis_key_running, '1', nx=True):
                try:
                    self.runspider()
                except Exception as e:
                    # Lazy %-args; logging.exception appends the traceback.
                    logging.exception('run_spider_exception:%s', e)
                finally:
                    # Always release the lock, even when the crawl failed.
                    self.redis_db.delete(self.redis_key_running)
        else:
            try:
                self.runspider()
            except Exception as e:
                logging.exception('run_spider_exception:%s', e)

        print('running:%s time:%s' % (self.redis_key_running, str(datetime.datetime.now())))

    def get_spiders(self):
        """Hook for subclasses: return the list of spider classes to run."""
        return []

    # Run the crawler; chains all spiders sequentially on one reactor.
    def runspider(self):
        s = get_project_settings()
        runner = CrawlerRunner(settings = s)
        spiders = self.get_spiders()

        @defer.inlineCallbacks
        def crawl(**spargs):
            # Run spiders one after another; stop the reactor when done.
            for spider in spiders:
                yield runner.crawl(spider, **spargs)
            reactor.stop()

        crawl(**self.spargs)
        reactor.run()  # the script will block here until the last crawl call is finished


def arglist_to_dict(arglist):
    """Parse ['key1=val1', 'key2=val2', ...] into {'key1': 'val1', ...}.

    Splits on the first '=' only, so values may themselves contain '='.
    """
    return {key: value for key, value in (item.split('=', 1) for item in arglist)}
