# -*- coding: utf-8 -*-
import re
import scrapy
from scrapy.http import Request
from myproject.items import Dj_Jd_Item
import os
import time

class DjJdSpider(scrapy.Spider):
    """Scrape crowd-funding project listings from dj.jd.com.

    Pages are fetched through the site's AJAX POST search endpoint:
    13 pages of 12 projects each. One ``Dj_Jd_Item`` is yielded per
    project card that carries a readable status text.
    """
    name = 'dj_jd'
    allowed_domains = ['dj.jd.com']
    # NOTE(review): trailing slash kept from the original even though the
    # POST endpoint below is slash-less; 404 is in handle_httpstatus_list,
    # so parse() still runs if this start URL 404s — confirm intent.
    start_urls = ['https://dj.jd.com/search.html/']
    # Let error responses reach the callbacks instead of being dropped.
    handle_httpstatus_list = [404, 501, 502, 503, 403]
    # Throttle via Scrapy's scheduler rather than calling time.sleep()
    # inside parse(): a blocking sleep stalls the whole single-threaded
    # Twisted reactor, freezing every in-flight request.
    custom_settings = {'DOWNLOAD_DELAY': 2}

    # Total number of result pages the site exposes.
    PAGE_TOTAL = 13
    # Compiled once (not per callback); pulls digits from "percent funded" text.
    _digits_re = re.compile(r'\d+')

    def parse(self, response):
        """Issue one AJAX POST per result page of the search endpoint."""
        for page_no in range(1, self.PAGE_TOTAL + 1):
            formdata = {
                'ajax': 'ajax',
                'searchKey': '',
                'projectStatus': '-1',
                'projectModel': '-1',
                'financingModel': '0',
                'pageNo': str(page_no),
                'pageCount': '12',
            }
            yield scrapy.FormRequest(
                'https://dj.jd.com/search.html',
                method='POST',
                formdata=formdata,
                callback=self.get_item,
                # Every page posts to the same URL; only formdata differs,
                # so the dupe filter must be bypassed.
                dont_filter=True,
            )

    def get_item(self, response):
        """Parse one result page and yield a Dj_Jd_Item per project card."""
        for card in response.css('.project-results .project-item'):
            # The status node's second text chunk holds the human-readable
            # status (the first is whitespace/markup). Guard against cards
            # with fewer text chunks instead of raising IndexError.
            status_texts = card.css('.project-status::text').extract()
            items_status = status_texts[1] if len(status_texts) > 1 else None
            if not items_status:
                continue
            item = Dj_Jd_Item()
            item['items_name'] = card.css(
                '.project-title  a::text').extract_first()
            item['items_num'] = card.css(
                '.project-status-detail .project-finish  dd span::text'
            ).extract_first()
            # Keep only the digits of the progress text, e.g. "87%" -> ['87'].
            item['items_per'] = self._digits_re.findall(
                ''.join(card.css('.process-text::text').extract()))
            item['items_type'] = card.css(
                '.project-title .project-type::text').extract_first()
            item['items_status'] = items_status
            yield item

