import scrapy
import json
from copy import deepcopy
import time


class SiteMapSpider(scrapy.Spider):
    """Generic spider driven by a Web Scraper (webscraper.io) style sitemap.

    The sitemap dict is read from the ``DOUBAN_SITEMAP`` project setting and
    is expected to contain ``startUrl`` (list of seed URLs) and ``selectors``
    (list of selector dicts with ``id``, ``type``, ``selector``, ``multiple``
    and ``parentSelectors`` keys). Each request carries the selector node id
    and the partially-built item in ``meta``.
    """

    name = "sitemap"
    custom_settings = {
        "DOWNLOAD_DELAY": 2
    }

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Build the spider and attach the sitemap from crawler settings."""
        spider = super().from_crawler(crawler, *args, **kwargs)
        spider.sitemap = crawler.settings.getdict("DOUBAN_SITEMAP")
        return spider

    def start_requests(self):
        """Seed one request per start URL under the implicit "_root" node."""
        # Default to [] so a sitemap without "startUrl" yields nothing
        # instead of raising TypeError on iteration.
        for url in self.sitemap.get("startUrl", []):
            yield scrapy.Request(
                url,
                meta={"id": "_root", "item": {"web-scraper-start-url": url}},
            )

    def get_selectors(self, current_id):
        """Return the sitemap selectors whose parent list contains current_id."""
        return [
            selector
            for selector in self.sitemap.get("selectors", [])
            if current_id in selector.get("parentSelectors", [])
        ]

    def parse(self, response):
        """Dispatch each child selector of the current node to its handler."""
        current_id = response.meta.get("id")
        # Copy once so sibling selector branches never share mutable state;
        # each handler makes its own copy before mutating.
        item = deepcopy(response.meta.get("item"))

        for selector in self.get_selectors(current_id):
            select_type = selector.get("type")
            multiple = selector.get("multiple")
            if select_type == "SelectorText":
                yield from self.handler_text(response, selector, item)
            elif select_type == "SelectorLink" and not multiple:
                yield from self.handler_single_link(response, selector, item)
            elif select_type == "SelectorLink" and multiple:
                yield from self.handler_link(response, selector, item)
            elif select_type == "SelectorGroup":
                # TODO: SelectorGroup is not implemented yet.
                pass

    def handler_text(self, response, selector, item):
        """Yield the item with the selector's matches joined by newlines.

        NOTE(review): ``.extract()`` on a bare CSS selector returns the
        matched nodes' raw markup, not their text — this presumably relies
        on the sitemap selector ending in ``::text``; confirm sitemap data.
        """
        item = deepcopy(item)
        text = response.css(selector.get("selector")).extract()
        item[selector.get("id")] = '\n'.join(text)
        # Timestamp used as a monotonically-ish increasing record order key.
        item["web-scraper-order"] = time.time()
        yield item

    def handler_single_link(self, response, selector, item):
        """Follow the first matched link, carrying the accumulated item."""
        select_id = selector.get("id")
        result = response.css(selector.get("selector"))
        url = result.css("::attr(href)").extract_first()
        if url is None:
            # No match / no href on this page: response.follow(None)
            # would raise ValueError, so emit nothing instead.
            return
        item = deepcopy(item)
        # Prefer the title attribute, fall back to the anchor text.
        title = result.css("::attr(title)").extract_first()
        text = result.css("::text").extract_first()
        item[select_id] = title or text or ""
        item[select_id + "-href"] = url
        yield response.follow(url, meta={"id": select_id, "item": item})

    def handler_link(self, response, selector, item):
        """Follow every matched link, yielding one request per match."""
        select_id = selector.get("id")
        for link in response.css(selector.get("selector")):
            url = link.css("::attr(href)").extract_first()
            if url is None:
                # Anchor without href — skip instead of crashing the callback.
                continue
            # Fresh copy per link so each request owns an independent item.
            row = deepcopy(item)
            title = link.css("::attr(title)").extract_first()
            text = link.css("::text").extract_first()
            row[select_id] = title or text or ""
            row[select_id + "-href"] = url
            yield response.follow(url, meta={"id": select_id, "item": row})
