import scrapy
import re
import time
from scrapy_splash.request import SplashRequest
from CrawlJingDong.items import CrawljingdongItem


class JdSpider(scrapy.Spider):
    """Crawl JD.com search results through Splash-rendered pages.

    For each keyword, paginated search pages are fetched via the Splash
    'execute' endpoint (the Lua script scrolls to trigger lazy-loaded
    goods), product links are extracted, and each detail page is parsed
    into a CrawljingdongItem.
    """

    name = 'jd'
    allowed_domains = ['jd.com']
    # Search terms; each is queried across paginated result pages.
    keyword = ["水果 苹果", "大米", "梨", "黄米", "玉米"]
    # URL template placeholders: keyword, keyword echo (wq), page number.
    url = 'https://search.jd.com/Search?keyword=%s&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%s&stock=1&page=%d&s=353&click=0'
    # Splash Lua script: load the page, scroll down to trigger lazy-loaded
    # product tiles, wait for rendering, then return the final HTML.
    script = '''function main(splash, args)
                  splash:go(args.url)
                  local scroll_to = splash:jsfunc("window.scrollTo")
                  scroll_to(0, 2800)
                  splash:set_viewport_full()
                  splash:wait(2)
                  return {
                    html = splash:html(),
                  }
                end'''

    def start_requests(self):
        """Yield one Splash 'execute' request per (keyword, page) pair."""
        for key in self.keyword:
            # JD numbers full result pages with odd values (1, 3, 5, ...),
            # hence the step of 2.
            for page in range(1, 100, 2):
                yield SplashRequest(
                    self.url % (key, key, page),
                    callback=self.parse,
                    endpoint='execute',
                    args={'lua_source': self.script},
                )

    def parse(self, response):
        """Extract product detail links from a rendered search-result page."""
        for li in response.css('div#J_goodsList li.gl-item'):
            href = li.css('.p-img>a::attr(href)').get()
            if not href:
                # Selector miss returns None; skip the tile instead of
                # crashing on 'https:' + None.
                continue
            # urljoin resolves protocol-relative ('//item.jd.com/...') and
            # already-absolute hrefs correctly, unlike naive concatenation.
            detail_url = response.urljoin(href)
            yield SplashRequest(detail_url, callback=self.parse_detail, args={'wait': 1})

    def parse_detail(self, response):
        """Parse one product detail page into a CrawljingdongItem."""
        item = CrawljingdongItem()
        name_div = response.css('.itemInfo-wrap div.sku-name').get()
        # Guard: .get() yields None on a selector miss, and re.search(None)
        # would raise TypeError.
        match_obj = re.search(r'<div.*?>([\s\S]*?)</div>', name_div) if name_div else None
        if match_obj:
            # Drop embedded <img> tags and surrounding whitespace from the name.
            item['name'] = re.sub(r'<img.*?>', '', match_obj.group(1)).strip()
        else:
            item['name'] = None
        item['img'] = response.css('#spec-n1 img::attr(data-origin)').get()
        item['comment'] = response.css('div#comment-count a::text').get()
        item['price'] = response.css('.p-price>span.price::text').get()
        item['level_one'] = response.css('.crumb.fl.clearfix>.item.first>a::text').get()
        item['shop'] = response.css('.J-hove-wrap.EDropdown.fr div.name a::text').get()
        item['level_two'] = response.css('.crumb.fl.clearfix>.item:nth-child(3)>a::text').get()
        item['url'] = response.url
        # Join the specification list entries into one '#'-delimited string.
        infos = response.css('div.p-parameter .parameter2.p-parameter-list li::text').getall()
        item['detail'] = '#'.join(infos)
        yield item
