from scrapy.commands import ScrapyCommand
from scrapy.crawler import CrawlerRunner
from scrapy.utils.conf import arglist_to_dict
from scrapy.utils.log import configure_logging
import tutorial.settings
from tutorial.cogik_setting import *
import pprint
import logging

# Module-level pretty-printer used to echo the spider whitelist in run().
pp = pprint.PrettyPrinter(indent=2)

class Command(ScrapyCommand):
    """Scrapy command that crawls all whitelisted spiders in one process.

    Spider names may be given as positional arguments; otherwise every
    spider known to the project's spider loader is considered.  Only the
    spiders present in ``SPIDER_WHITELIST`` (imported from
    ``tutorial.cogik_setting``) are actually scheduled.
    """

    requires_project = True

    def syntax(self):
        """Return the usage syntax string shown in command help."""
        return '[options]'

    def short_desc(self):
        """Return the one-line description shown in the command listing."""
        return 'Runs all of the spiders'

    def add_options(self, parser):
        """Register -a/-o/-t on top of the base ScrapyCommand options."""
        ScrapyCommand.add_options(self, parser)
        parser.add_option("-a", dest="spargs", action="append",
                          default=[], metavar="NAME=VALUE",
                          help="set spider argument (may be repeated)")
        parser.add_option("-o", "--output", metavar="FILE",
                          help="dump scraped items into \
                          FILE (use - for stdout)")
        parser.add_option("-t", "--output-format", metavar="FORMAT",
                          help="format to use for dumping items with -o")

    def process_options(self, args, opts):
        """Convert the repeated ``-a NAME=VALUE`` options into a dict.

        On malformed input ``opts.spargs`` is left as the raw list; the
        error is logged instead of silently ignored so the user can see
        that their spider arguments were dropped.
        """
        ScrapyCommand.process_options(self, args, opts)
        try:
            opts.spargs = arglist_to_dict(opts.spargs)
        except ValueError:
            logging.warning("Invalid -a value(s), expected NAME=VALUE: %s",
                            opts.spargs)

    def run(self, args, opts):
        """Schedule every whitelisted spider and block until all finish."""
        configure_logging()
        spider_loader = self.crawler_process.spider_loader
        pp.pprint(SPIDER_WHITELIST)
        # Spider names given on the command line take precedence over the
        # full list discovered by the spider loader.
        for spider_name in args or spider_loader.list():
            if spider_name in SPIDER_WHITELIST:
                self.crawler_process.crawl(spider_name, **opts.spargs)
            else:
                logging.warning("%s is not in whitelist", spider_name)
        # Starts the Twisted reactor; blocks until all crawls are done.
        self.crawler_process.start()

# from scrapy.commands import ScrapyCommand
# from scrapy.crawler import CrawlerRunner
# from scrapy.utils.conf import arglist_to_dict
# from scrapy.utils.log import configure_logging
# # from tutorial.spiders.tv_spider import TvSpider
# # from tutorial.spiders.variety_spider import VarietySpider


# class Command(ScrapyCommand):

#     requires_project = True

#     def syntax(self):
#         return '[options]'

#     def short_desc(self):
#         return 'Runs all of the spiders'

#     def add_options(self, parser):
#         ScrapyCommand.add_options(self, parser)
#         parser.add_option("-a", dest="spargs", action="append",
#                           default=[], metavar="NAME=VALUE",
#                           help="set spider argument (may be repeated)")
#         parser.add_option("-o", "--output", metavar="FILE",
#                           help="dump scraped items into \
#                           FILE (use - for stdout)")
#         parser.add_option("-t", "--output-format", metavar="FORMAT",
#                           help="format to use for dumping items with -o")

#     def process_options(self, args, opts):
#         ScrapyCommand.process_options(self, args, opts)
#         try:
#             opts.spargs = arglist_to_dict(opts.spargs)
#         except ValueError:
#             pass

#     def run(self, args, opts):
#         configure_logging()
#         spider_loader = self.crawler_process.spider_loader
#         for spider_name in args or spider_loader.list():
#             self.crawler_process.crawl(spider_name, **opts.spargs)
#         self.crawler_process.start()

#         # configure_logging()
#         # runner = CrawlerRunner()
#         # runner.crawl(TvSpider)
#         # runner.crawl(VarietySpider)
#         # d = runner.join()
#         # d.addBoth(lambda _: reactor.stop())

#         # reactor.run()
