import json

import scrapy
from scrapy import Request

from ssqInfo.items import SSQItem


class FcssqSpider(scrapy.Spider):
    """Scrape Shuangseqiu (双色球) draw results from the China Welfare
    Lottery JSON API on cwl.gov.cn.

    The single start URL requests every issue from 2003002 to 2021099 with
    an empty ``pageNo``, which the API treats as page 1. ``parse`` handles
    that first page: it schedules requests for pages 2..pageCount (routed
    to ``parse2``) and yields the items found on page 1 itself.
    """

    name = 'fcssq'
    allowed_domains = ['cwl.gov.cn']
    start_urls = [
        'http://www.cwl.gov.cn/cwl_admin/kjxx/findDrawNotice?name=ssq&issueCount=&issueStart=2003002&issueEnd=2021099&dayStart=&dayEnd=&pageNo=']

    # Browser-like headers; the API rejects requests that look automated.
    # NOTE(review): the leading spaces in the "Connection" and "Cookie"
    # values are preserved from the original capture — confirm intentional.
    headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-US;q=0.7",
        "Connection": " keep-alive",
        "Cookie": " Sites=_21; UniqueID=bWxa4KHPR4nTEGB71619143528907; _ga=GA1.3.1822056007.1619143530; _gid=GA1.3.1965082356.1619143530; 21_vq=10",
        "DNT": "1",
        "Host": "www.cwl.gov.cn",
        "Referer": "http://www.cwl.gov.cn/kjxx/ssq/kjgg/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36",
        "X-Requested-With": "XMLHttpRequest"
    }

    # Field names copied verbatim from each API result row into an SSQItem.
    _ITEM_FIELDS = ('name', 'code', 'date', 'week', 'red', 'blue', 'blue2',
                    'sales', 'content', 'poolmoney', 'msg')

    def start_requests(self):
        """Yield the initial request(s) with the browser-like headers attached."""
        for url in self.start_urls:
            yield Request(url, headers=self.headers)

    def parse(self, response):
        """Parse page 1: schedule the remaining pages, then yield its items."""
        data = json.loads(response.body.decode('utf-8'))
        page_count = data['pageCount']
        # Pages are 1-indexed and this response *is* page 1, so start at 2.
        for page_no in range(2, page_count + 1):
            yield scrapy.Request(self.start_urls[0] + str(page_no),
                                 headers=self.headers,
                                 callback=self.parse2)
        yield from self._extract_items(data['result'])

    def parse2(self, response):
        """Parse a page after the first: yield its items only (no paging)."""
        data = json.loads(response.body.decode('utf-8'))
        yield from self._extract_items(data['result'])

    def _extract_items(self, result):
        """Yield one populated SSQItem per draw record in *result*."""
        for row in result:
            ssq = SSQItem()
            for field in self._ITEM_FIELDS:
                ssq[field] = row[field]
            yield ssq
