import logging
from scrapy import signals
from scrapy.exceptions import NotConfigured

logger = logging.getLogger(__name__)

class MyExts(object):
    """Scrapy extension that logs spider lifecycle and request events.

    Hooked up via ``from_crawler``; each handler simply emits a log line
    for the corresponding signal. Enable/disable with the boolean setting
    ``MYEXTS_ENABLED`` (defaults to enabled, so existing deployments are
    unaffected).
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Build the extension and connect its handlers to crawler signals.

        Raises:
            NotConfigured: if the ``MYEXTS_ENABLED`` setting is explicitly
                set to a false value, so Scrapy skips this extension.
        """
        # Standard Scrapy extension gate; default True keeps prior behavior
        # (the extension was previously always on).
        if not crawler.settings.getbool("MYEXTS_ENABLED", True):
            raise NotConfigured
        ext = cls()
        # connect the extension object to signals
        crawler.signals.connect(ext.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(ext.request_dropped, signal=signals.request_dropped)
        crawler.signals.connect(ext.request_scheduled, signal=signals.request_scheduled)
        crawler.signals.connect(ext.spider_error, signal=signals.spider_error)
        # return the extension object
        return ext

    def spider_opened(self, spider):
        """Log that *spider* has started."""
        logger.info("opened spider %s", spider.name)

    def spider_closed(self, spider):
        """Log that *spider* has finished."""
        logger.info("closed spider %s", spider.name)

    def request_dropped(self, request, spider):
        """Log a request rejected by the scheduler."""
        logger.info("request dropped %s", request.url)

    def request_scheduled(self, request, spider):
        """Log a request accepted by the scheduler."""
        logger.info("scheduled %s", request.url)

    def spider_error(self, failure, response, spider):
        """Log an exception raised in a spider callback.

        A callback failure is an error condition, so log at ERROR level
        and include the Twisted ``failure`` details (previously this was
        logged at INFO and the failure object was discarded).
        """
        logger.error("spider %s error on %s: %s", spider.name, response.url, failure)