# -*- coding: utf-8 -*-
import scrapy
import urllib
import urllib.request
import re
from scrapy.http import Request
from ArticleSpider.items import SinabotItem

class SinaSpider(scrapy.Spider):
    """Crawl Sina's international-news roll API pages and scrape each article.

    ``parse`` builds the paginated roll-API URLs, fetches them with urllib,
    extracts article URLs from the JSONP payload, and schedules one detail
    request per article; ``parse_detail`` fills the item with title and body.
    """
    name = 'sina'
    allowed_domains = ['sina.com.cn']
    start_urls = ['http://news.sina.com.cn/world']

    def parse(self, response):
        """Enumerate roll-API pages per category and yield article Requests.

        NOTE(review): the urllib fetches below are blocking calls inside a
        Scrapy callback; yielding scrapy Requests for the API pages instead
        would let the scheduler parallelize them — confirm before changing.
        """
        base_fmat_f = 'http://api.roll.news.sina.com.cn/zt_list?channel=news&cat_1=gjxw'
        base_fmat_m = '&level==1||=2&show_ext=1&show_all=1&show_num=22&tag=1&format=json&page='
        base_fmat_l = '&callback=newsloadercallback'

        # Category filters appended to the base query ("" = all categories).
        cates = [
            ""
            , "&cat_2==hqqw||=gjmtjj"
            , "&cat_3=gj-oz"
            , "&cat_3=gj-mz"
            , "&cat_3=gj-yz"
        ]

        min_page_num = 51
        max_page_num = 150  # range() end is exclusive: pages 51..149

        # BUG FIX: the original concatenated base_fmat_l twice and never used
        # base_fmat_m, producing malformed URLs with no page parameter. The
        # try/except around plain string concatenation was dead code — removed.
        waiting_urls = [
            base_fmat_f + cate + base_fmat_m + str(p) + base_fmat_l
            for cate in cates
            for p in range(min_page_num, max_page_num)
        ]

        opener = urllib.request.build_opener()
        urllib.request.install_opener(opener)

        # BUG FIX: raw string replaces the original's inconsistent backslash
        # escaping; compiled once and hoisted out of the fetch loop (the
        # original also misspelled re.compile as re.complie -> AttributeError).
        url_pat = re.compile(r'http://news\.sina\.com\.cn/.*?\.shtml')

        for api_url in waiting_urls:
            data = urllib.request.urlopen(api_url).read()
            data = data.decode(encoding="UTF-8")
            # BUG FIX: the original discarded the matched article URLs and
            # iterated an always-empty `items` list, so no Request was ever
            # yielded. Build one item per matched article URL instead.
            for article_url in url_pat.findall(data):
                item = SinabotItem()
                item["Url"] = article_url
                yield Request(url=article_url,
                              meta={"item_l": item},
                              callback=self.parse_detail)

    def parse_detail(self, response):
        """Populate the forwarded item with the article title and body."""
        item = response.meta['item_l']
        title = response.css(".main-title::text").extract_first("")
        # Article body paragraphs (list of strings), joined with commas.
        content_arr = response.xpath('//div[@id="article"]/p/text()').extract()
        content = (',').join(content_arr)
        item['Head'] = title
        item['Content'] = content
        yield item

