# -*- coding: utf-8 -*-

# Sina news spider: crawls the category listing JSON endpoints and follows
# each linked article page to extract its title and body.
# See: https://doc.scrapy.org/en/latest/topics/spiders.html

import scrapy
import tools
from scrapy.http import Request

class IndexSpider(scrapy.Spider):
    """Crawl Sina news category listings (JSON API) and linked article pages.

    Flow:
      * ``parse``  -- handles the JSON listing for one category: yields one
        summary dict per article, follows each article URL with ``parse2``,
        then requests the next category's listing (categories 1..9).
      * ``parse2`` -- extracts title and body text from an article page.
    """

    name = "sina"
    # allowed_domains = ["dmoz.org"]
    # Base listing URL; the category number is appended to the trailing
    # ``col=5626`` query parameter (producing col=56261 .. col=56269).
    url = "http://interface.sina.cn/wap_api/layout_col.d.json?level=1%2C2&show_num=1&page=1&act=more&jsoncallback=callbackFunction&_=1515646287637&callback=Zepto1515646214377&col=5626"
    start_urls = [
        url + "1"
    ]
    category = 1  # current category index, advanced after each listing page
    count = 10    # NOTE(review): currently unused; kept for compatibility

    def parse2(self, response):
        """Parse an article detail page.

        Yields one ``{'title', 'content'}`` dict per ``article.art_box``
        element found on the page.
        """
        base_path = '//article[@class="art_box"]'
        title_path = './/h1[@class="art_tit_h1"]/text()'
        content_path = './/p[@class="art_p"]'
        for article in response.xpath(base_path):
            item = {
                'title': article.xpath(title_path).extract_first(),
                # Joined raw <p class="art_p"> markup for the article body.
                'content': ''.join(article.xpath(content_path).extract()),
            }
            self.logger.debug('parsed article item: %s', type(item))
            yield item

    def parse(self, response):
        """Parse one category's JSON listing.

        Yields a summary dict per entry, a ``Request`` for each article URL
        (handled by :meth:`parse2`), and finally a ``Request`` for the next
        category's listing until category 9 is done.
        """
        # response.text replaces the deprecated/removed body_as_unicode().
        result = tools.get_json_data(response.text)
        self.logger.debug('listing for category %d: %s',
                          self.category, type(result))
        for entry in result['result']['data']['list']:
            data = {
                'name': entry['_id'],
                'title': entry['title'],
                'category_id': self.category,
                'source': entry['source'],
                'create_time': entry['cdateTime'],
            }
            # The API may omit pictures; only set 'cover' when one exists.
            pics = entry.get('allPics', {}).get('pics') or []
            if pics:
                data['cover'] = pics[0]['imgurl']
            yield data

            # Follow the article page when a URL is present and non-empty.
            if entry.get('URL'):
                yield Request(entry['URL'], callback=self.parse2)

        self.category += 1
        if self.category > 9:
            return
        yield Request(self.url + str(self.category), callback=self.parse)