from __future__ import absolute_import
import traceback
import warnings

from zope.interface import implementer
from scrapy.interfaces import ISpiderLoader
from scrapy.utils.misc import walk_modules
from scrapy.utils.spider import iter_spider_classes
from squirrel_core.commons.utils.logger import Logging


@implementer(ISpiderLoader)
class SpiderLoader(object):
    """Lazily load spider classes by name from the configured spider modules.

    Unlike Scrapy's default ``SpiderLoader`` (which imports every spider
    eagerly at construction time), spiders here are imported on first
    ``load()`` and cached in ``self._spiders`` afterwards.
    """

    def __init__(self, settings):
        # Module prefixes to search. load() derives the full module path as
        # <spider_modules[0]>.<group>.<spider_name>, where <group> is the
        # part of the spider name before the first underscore —
        # TODO confirm this matches the project's package layout.
        self.spider_modules = settings.getlist('SPIDER_MODULES')
        # When True, import failures are reported as warnings instead of
        # propagating.
        self.warn_only = settings.getbool('SPIDER_LOADER_WARN_ONLY')
        # Cache of spider name -> spider class, filled lazily by load().
        self._spiders = {}
        self.logger = Logging()

    @classmethod
    def from_settings(cls, settings):
        """Standard Scrapy factory hook: build a loader from a Settings object."""
        return cls(settings)

    def load(self, spider_name):
        """Return the Spider class named *spider_name*, importing it on demand.

        Raises:
            KeyError: the module imported cleanly but contains no spider
                class whose ``name`` matches.
            ImportError: the derived module could not be imported and
                ``warn_only`` is False (with warn_only True, a RuntimeWarning
                is issued and None is returned instead — original behavior,
                preserved).
            RuntimeError: any other unexpected failure during lookup.
        """
        try:
            return self._spiders[spider_name]
        except KeyError:
            # e.g. spider_modules[0]='spiders', spider_name='news_daily'
            # -> module path 'spiders.news.news_daily'.
            name = f"{self.spider_modules[0]}.{spider_name.split('_')[0]}.{spider_name}"
            try:
                spider = next(spcls for module in walk_modules(name)
                              for spcls in iter_spider_classes(module)
                              if spcls.name == spider_name)
                self._spiders[spider.name] = spider
            except ImportError:
                if self.warn_only:
                    msg = f"Could not load spiders from module '{name}':{traceback.format_exc()}"
                    warnings.warn(msg, RuntimeWarning)
                else:
                    self.logger.error(traceback.format_exc())
                    raise
            except StopIteration:
                raise KeyError(f"Spider not found: {spider_name}")
            except Exception as e:
                # BUG FIX: the original did `raise (f"error:{e}")`, which
                # raises a plain str and therefore fails with
                # "TypeError: exceptions must derive from BaseException",
                # hiding the real error. Wrap it in a proper exception and
                # chain the cause.
                raise RuntimeError(f"error:{e}") from e
            else:
                return spider

    def find_by_request(self, request):
        """Return names of cached spiders whose class can handle *request*.

        NOTE(review): only consults the lazy cache, so spiders never passed
        through load() are not considered — confirm this is intended.
        """
        return [name for name, cls in self._spiders.items()
                if cls.handles_request(request)]

    def list(self):
        """Return the names of all spiders loaded so far."""
        return list(self._spiders.keys())
