import scrapy
from scrapy_redis.spiders import RedisSpider
from ..items import BookItem



class BookspiderSpider(RedisSpider):
    """Distributed Douban book spider.

    Start URLs are pushed into the Redis list named by ``redis_key``
    (scrapy-redis pops them and feeds them to ``parse``).  ``parse``
    walks a tag listing page and schedules one request per book;
    ``get_book_detail`` turns a detail page into a ``BookItem``.
    """

    name = 'bookspider'
    # allowed_domains = ['bookspider.com']
    # start_urls = []
    redis_key = 'BookSpider:start_urls'
    # base_url = 'https://book.douban.com/tag/?view=type&icn=index-sorttags-all'

    # Browser-like headers (incl. session cookie) required to avoid
    # Douban's anti-bot blocking.  NOTE(review): the cookie is
    # session-bound and will expire — confirm it is still valid.
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "Accept-Language": "zh-CN,zh;q=0.9,zh-TW;q=0.8,en-US;q=0.7,en;q=0.6",
        "Cache-Control": "max-age=0",
        "Connection": "keep-alive",
        "Cookie": 'bid=p_Kb3WLc87Y; ap_v=0,6.0; viewed="26919485"; gr_user_id=34584747-9d8c-46c2-8b23-4f9fa7f5cc56; gr_cs1_b0b69eda-70cb-451f-92f9-0ea27a639227=user_id%3A0; _vwo_uuid_v2=D89C4E5A4C793E7FD971C7EF45DAD0F8D|16f26569e80a79df16454d5e5dab9f1b; _pk_ses.100001.3ac3=*; _pk_ref.100001.3ac3=%5B%22%22%2C%22%22%2C1608454238%2C%22https%3A%2F%2Fsearch.douban.com%2Fbook%2Fsubject_search%3Fsearch_text%3Dpython%26cat%3D1001%26start%3D15%22%5D; __gads=ID=0442d04dc03764fc-221dbf0348c500b0:T=1608454237:RT=1608454237:S=ALNI_MZ2E-ogYbJeB3BcZ3nBcnd4BdC0KA; __utma=30149280.210949746.1608454123.1608454123.1608454802.2; __utmz=30149280.1608454802.2.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; __utma=81379588.1205090590.1608454238.1608454238.1608454802.2; __utmz=81379588.1608454802.2.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; __yadk_uid=DUTmPibcCXxPckDBmdxrLMrDLRDlfGza; __utmt_douban=1; __utmt=1; gr_session_id_22c937bbd8ebd703f2d8e9445f7dfd03=2a8ec986-5f2c-4490-ad66-22f413dd8bc5; gr_cs1_2a8ec986-5f2c-4490-ad66-22f413dd8bc5=user_id%3A0; __utmc=30149280; __utmc=81379588; gr_session_id_22c937bbd8ebd703f2d8e9445f7dfd03_2a8ec986-5f2c-4490-ad66-22f413dd8bc5=true; __utmb=30149280.13.10.1608454802; __utmb=81379588.13.10.1608454802; _pk_id.100001.3ac3=3ec66a091e7f2986.1608454238.1.1608456068.1608454238.',
        "Host": "book.douban.com",
        "Referer": "https://book.douban.com/",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36"
    }

    def parse(self, response):
        """Schedule one detail request per book on a tag listing page.

        :param response: listing-page response popped from Redis.
        :yields: ``scrapy.Request`` for each book detail URL found.
        """
        detail_urls = response.xpath(
            '//li[@class="subject-item"]/div[@class="info"]/h2/a/@href').getall()
        for url in detail_urls:
            # dont_filter defaults to False, so duplicate URLs are
            # still filtered by the (Redis-backed) dupefilter.
            yield scrapy.Request(url=url, headers=self.headers,
                                 callback=self.get_book_detail)

    @staticmethod
    def _value_after(tokens, label, strip_chars=None):
        """Return the token following *label* in *tokens*, '' if absent.

        Mirrors the original ``tokens[tokens.index(label) + 1]`` lookup,
        but degrades to an empty string instead of raising when the
        label is missing or is the last token.
        """
        try:
            value = tokens[tokens.index(label) + 1]
        except (ValueError, IndexError):
            return ''
        if strip_chars:
            value = value.strip(strip_chars)
        return value.strip()

    def get_book_detail(self, response):
        """Parse one book detail page into a fresh ``BookItem``.

        A new item is created per response: the previous class-level
        shared ``BookItem`` was clobbered by concurrently processed
        responses, and the bare ``except: pass`` could return stale
        data from an earlier book.  Missing fields now simply yield
        empty strings instead of silently discarding the whole page.

        :param response: book detail-page response.
        :returns: populated ``BookItem``.
        """
        item = BookItem()

        # The //div[@id="info"] block is "label: value" text; after
        # split() each label and its value are adjacent tokens.
        tokens = ''.join(
            response.xpath('//div[@id="info"]//text()').getall()).split()
        item["author"] = self._value_after(tokens, "作者:")
        item["publish"] = self._value_after(tokens, "出版社:")
        item["year"] = self._value_after(tokens, "出版年:")
        item["page"] = self._value_after(tokens, "页数:")
        item["price"] = self._value_after(tokens, "定价:", '元')
        item["make_up"] = self._value_after(tokens, "装帧:")
        item["ISBN"] = self._value_after(tokens, "ISBN:")

        # Title may be absent on malformed pages; extract_first() would
        # return None and the old code crashed on .strip().
        title = response.xpath('//div[@id="wrapper"]/h1/span/text()').get()
        item["book_title"] = title.strip() if title else ''

        # Second <a> inside the info block (first is the author link).
        info_links = response.xpath('//div[@id="info"]//a/text()').getall()
        item["series"] = info_links[1].strip() if len(info_links) > 1 else ''

        item["abstract"] = ''.join(
            response.xpath('//div[@class="intro"]/p//text()').getall()).strip()
        item["url"] = response.url
        return item
