from lxml import etree
import re

import feapder


class Spider(feapder.AirSpider):
    """Dangdang book-detail spider.

    Fetches one product page, extracts the parameters needed by the
    site's callback/detail endpoint, then parses the HTML fragment
    returned by that endpoint and prints selected fields.
    """

    # NOTE(review): database credentials are hard-coded here; move them to a
    # settings module or environment variables before sharing/deploying.
    __custom_setting__ = dict(
        ITEM_PIPELINES=["feapder.pipelines.mongo_pipeline.MongoPipeline"],
        MONGO_IP="server.windows",
        MONGO_PORT=27017,
        MONGO_DB="feapder",
        MONGO_USER_NAME="root",
        MONGO_USER_PASS="141535",
    )

    # Entry point for seeding initial tasks
    def start_requests(self):
        """Seed the crawl with a single product-detail page."""
        yield feapder.Request(
            "https://product.dangdang.com/659434820.html",
            callback=self.parse_book,
            download_midware=self.download_midware,
        )

    def download_midware(self, request):
        """Custom download middleware: attach an HTTP proxy to the request.

        :param request: the outgoing feapder request
        :return: the same request, mutated with proxy settings
        """
        # Pick a proxy at random here if a proxy pool is available.
        request.proxies = {"http": "http://223.214.30.252:4267"}
        return request

    def parse_book(self, request, response):
        """Parse the product page and schedule the detail-fragment request.

        Extracts the price plus the five parameters the callback/detail
        endpoint needs from inline JavaScript on the page.
        """
        prices = response.xpath('//p[@id="dd-price"]/text()').extract()
        # BUGFIX: the original checked `len(...) > 0` but indexed [1],
        # raising IndexError whenever exactly one text node matched.
        if len(prices) > 1:
            price = prices[1].strip()
            print("price=%s" % price)

        # Pull each endpoint parameter out of the page's inline JSON-ish JS;
        # response.re returns a list, joined to "" when nothing matched
        # (same behavior as the original per-field extraction).
        fields = ("productId", "template", "describeMap", "shopId", "categoryPath")
        params = {
            name: "".join(response.re(r'%s":"(.*?)"' % name)) for name in fields
        }

        url = (
            "https://product.dangdang.com/index.php?r=callback%2Fdetail"
            "&productId=" + params["productId"]
            + "&templateType=" + params["template"]
            + "&describeMap=" + params["describeMap"]
            + "&shopId=" + params["shopId"]
            + "&categoryPath=" + params["categoryPath"]
        )
        yield feapder.Request(
            url, callback=self.parse_book2, download_midware=self.download_midware
        )

    def parse_book2(self, request, response):
        """Parse the JSON detail fragment and print abstract/description/media.

        The endpoint answers with JSON whose ``data.html`` field carries an
        HTML fragment; that fragment is parsed with lxml.
        """
        print("========" + request.url)
        # ROBUSTNESS: the original crashed with TypeError/KeyError when the
        # response carried no "data"/"html"; treat that as an empty page.
        payload = response.json or {}
        data = payload.get("data") or {}
        html = data.get("html", "")
        if not html:
            return
        parse_html = etree.HTML(html)
        print(html)

        def _text_of(xpath):
            # Concatenate every text node matched by *xpath* (join beats
            # the original quadratic `+=` accumulation loop).
            return "".join(parse_html.xpath(xpath))

        recommended_reason = _text_of(
            '//div[@id="abstract"][1]/div[@class="descrip"][1]/p[1]/text()'
        )
        print(recommended_reason)

        description = _text_of(
            '//div[@id="content"][1]/div[@class="descrip"][1]/p[1]/text()'
        )
        print(description)

        media_comment = _text_of('//span[@id="mediaFeedback-show"][1]/text()')
        print(media_comment)


if __name__ == "__main__":
    # thread_count 为线程数
    Spider(thread_count=1).start()
