import scrapy
from graphlib import Node
from copy import deepcopy


class NewSiteMapSpider(scrapy.Spider):
    name = "sitemap2"

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Build the spider and wire up the sitemap selector graph.

        Reads the sitemap definition from crawler settings, wraps every
        selector dict in a graph ``Node``, appends a synthetic ``_root``
        node, and links each node to its parent(s) via a "child" relation.

        Returns the fully-initialized spider instance (Scrapy contract).
        """
        self = super().from_crawler(crawler, *args, **kwargs)
        # BUG FIX: the original assigned DOUBAN_SITEMAP and then immediately
        # overwrote it with DAGONG_SITEMAP (dead code). Prefer DAGONG to keep
        # the original effective behavior, but fall back to DOUBAN so either
        # setting works.
        self.sitemap = (crawler.settings.getdict("DAGONG_SITEMAP")
                        or crawler.settings.getdict("DOUBAN_SITEMAP"))
        self.nodes = [Node(selector) for selector in self.sitemap["selectors"]]
        root = Node({"id": "_root", "parentSelectors": []})
        self.root = root
        self.nodes.append(root)
        # Index nodes by id for O(1) parent lookup instead of the original
        # O(n^2) nested scan. Assumes selector ids are unique, which the
        # original web-scraper-style sitemap format requires.
        nodes_by_id = {node["id"]: node for node in self.nodes}
        for node in self.nodes:
            for parent_id in node["parentSelectors"]:
                parent = nodes_by_id.get(parent_id)
                if parent is not None:
                    parent.relate(node, "child")
        return self

    def start_requests(self):
        """Yield one request per configured start URL.

        Each request starts its crawl at the synthetic root node and
        carries a seed item recording the start URL it came from.
        """
        for start_url in self.sitemap.get("startUrl"):
            seed_item = {"web-scraper-start-url": start_url}
            request_meta = {"node": self.root, "item": seed_item}
            yield scrapy.Request(start_url, meta=request_meta)

    def parse(self, response):
        """Evaluate the current sitemap node's child selectors on *response*.

        Reads the current graph node and the accumulated item from
        ``response.meta``, extracts single-valued and multi-valued text
        fields, collects link URLs to follow, then either yields the
        finished item (no outgoing links) or follows each collected URL
        with a deep-copied item and the link's node attached.
        """
        node = response.meta["node"]
        # Deep-copy so sibling requests sharing the same meta item are not
        # mutated by this response's extractions.
        item = deepcopy(response.meta["item"])
        parse_result = {
            "field": [],
            "multi-field": [],
            "next": []
        }
        for rel in node.rels(type="child"):
            child = rel.end
            field = child["id"]
            text_selector = child["selector"] + "::text"
            result = response.css(child["selector"])
            # "multiple" is optional in the sitemap definition; default to a
            # single-valued selector. (Original used a bare except.)
            try:
                multiple = child["multiple"]
            except KeyError:
                multiple = False
            if child["type"] == "SelectorGroup":
                extractAttribute = child["extractAttribute"]
                text = response.css(text_selector).extract()
                parse_result["field"].append((field, text))
                if extractAttribute:
                    attrs = response.css(
                        child["selector"] + "::attr(%s)" % extractAttribute
                    ).extract()
                    # BUG FIX: the original wrote append((key), attrs) --
                    # two positional arguments to list.append, a TypeError
                    # at runtime. Append a single (key, values) tuple.
                    parse_result["field"].append(
                        (f'{field}-{extractAttribute}', attrs))
            if child["type"] == "SelectorText":
                if multiple:
                    text = response.css(text_selector).extract()
                    parse_result["multi-field"].append((field, text))
                else:
                    text = response.css(text_selector).get()
                    parse_result["field"].append((field, text))
            if child["type"] == "SelectorLink":
                if multiple:
                    urls = response.css(child["selector"]).css(
                        "::attr(href)").extract()
                else:
                    urls = [result.css("::attr(href)").get()]
                for url in urls:
                    parse_result["next"].append((url, child))
            if child["type"] == "SelectorElement":
                # BUG FIX: this branch was empty in the original, which is a
                # SyntaxError that made the module unimportable.
                # SelectorElement handling is not implemented yet.
                pass

        # NOTE(review): "multi-field" results are collected but never merged
        # into the item here -- confirm whether that is intentional.
        item.update(dict(parse_result["field"]))
        if not parse_result["next"]:
            yield item
        else:
            for url, _node in parse_result["next"]:
                print("url:", url)
                _item = deepcopy(item)
                _item[_node["id"] + "-href"] = url
                yield response.follow(url, meta={"item": _item, "node": _node})
    def parse_select(self, response, node, item, parse_result):
        for rel in node.rels(type="child"):
            field = rel.end["id"]
            selector = rel.end["selector"] + "::text"
            result = response.css(rel.end["selector"])
            try:
                multiple = rel.end["multiple"]
            except:
                multiple = False
            if rel.end["type"] == "SelectorGroup":
                extractAttribute = rel.end["extractAttribute"]
                text = response.css(selector).extract()
                parse_result["field"].append((field, text))
                if extractAttribute:
                    attrs = response.css(rel.end["selector"]+"::attr(%s)"%extractAttribute).extract()
                    parse_result["field"].append((f'{field}-{extractAttribute}'), attrs)
            if rel.end["type"] == "SelectorText":
                if multiple:
                    text = response.css(selector).extract()
                    parse_result["multi-field"].append((field, text))
                else:
                    text = response.css(selector).get()
                    parse_result["field"].append((field, text))
            if rel.end["type"] == "SelectorLink":
                if multiple:
                    urls = response.css(rel.end["selector"]).css("::attr(href)").extract()
                else:
                    urls = [result.css("::attr(href)").get()]
                for url in urls:
                    parse_result["next"].append((url, rel.end))
            if rel.end["type"] == "SelectorElement":
