# -*- coding: utf-8 -*-

"""
DateTime   : 2021/04/13 10:16
Author     : ZhangYafei
Description: 中国福利彩票
http://www.cwl.gov.cn/kjxx/ssq/kjgg/
"""
import json
from concurrent import futures
from concurrent.futures import Future

from openpyxl import workbook
from pandas import DataFrame
from requests import Session
from zyf.timer import timeit


class CwlGov:
    """Scraper for China Welfare Lottery double-color-ball (双色球) draws.

    Fetches paginated draw notices from the official JSON API and exports
    the accumulated rows to an Excel workbook (openpyxl or pandas backend).
    """

    def __init__(self):
        # JSON API endpoint behind the public results page.
        self.url = 'http://www.cwl.gov.cn/cwl_admin/kjxx/findDrawNotice'
        headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36 Edg/89.0.774.68',
            'Referer': 'http://www.cwl.gov.cn/kjxx/ssq/kjgg/',
        }
        self.session = Session()
        # update() keeps requests' default headers (Accept-Encoding, ...)
        # instead of replacing the whole header dict.
        self.session.headers.update(headers)
        # Output column headers (Chinese, matching the official field names).
        self.columns = [
            '彩票类型', '期号', '开奖日期', '红球', '蓝球',
            '总销售额', '奖池', '一等奖注数', '一等奖中奖金额', '一等奖中奖情况',
            '二等奖注数', '二等奖中奖金额', '三等奖注数', '三等奖中奖金额', '详细中奖信息',
            '详细页面链接', '开奖视频链接'
        ]
        self.table_name = '双色球'
        # Accumulated rows, one list per draw, aligned with self.columns.
        self.data_list = []
        self.filepath = '中国福利彩票.xlsx'

    def spider(self, start: str, end: str, page_num: int = None):
        """Request one page of draw notices for the date range [start, end].

        Args:
            start: range start date, 'YYYY-MM-DD'.
            end: range end date, 'YYYY-MM-DD'.
            page_num: 1-based page index; None lets the API default to page 1.

        Returns:
            The raw ``requests.Response`` for the API call.
        """
        params = {
            'name': 'ssq',
            'dayStart': start,
            'dayEnd': end,
            'pageNo': page_num,
        }
        return self.session.get(self.url, params=params)

    def parse(self, response):
        """Extract draw rows from a response (or a Future resolving to one).

        Appends one row per draw to ``self.data_list``. Also usable as a
        ``Future.add_done_callback`` target for the thread-pool path.

        Returns:
            The decoded JSON payload, so callers can reuse it (e.g. for
            ``pageCount``) without parsing the body a second time.
        """
        if isinstance(response, Future):
            response = response.result()
        data = response.json()
        for res in data['result']:
            grades = res['prizegrades']
            self.data_list.append([
                res['name'], res['code'], res['date'], res['red'], res['blue'],
                res['sales'], res['poolmoney'],
                grades[0]['typenum'], grades[0]['typemoney'], res['content'],
                grades[1]['typenum'], grades[1]['typemoney'],
                grades[2]['typenum'], grades[2]['typemoney'],
                # ensure_ascii=False keeps the Chinese prize text readable
                # in the Excel cell instead of \uXXXX escapes.
                json.dumps(grades, ensure_ascii=False),
                res['detailsLink'], res['videoLink'],
            ])
        return data

    @timeit
    def save_to_excel(self):
        """Write collected rows to Excel via openpyxl, newest draw first."""
        wb = workbook.Workbook()
        sheet = wb.worksheets[0]
        sheet.title = self.table_name
        # Header row first, then data.
        sheet.append(self.columns)
        # Sort by issue number (期号, row[1]) descending so this exporter's
        # output matches save_to_excel_with_pandas.
        for item in sorted(self.data_list, key=lambda row: row[1], reverse=True):
            sheet.append(item)
        wb.save(self.filepath)

    @timeit
    def save_to_excel_with_pandas(self):
        """Write collected rows to Excel via pandas, newest draw first."""
        df = DataFrame(columns=self.columns, data=self.data_list)
        df.sort_values('期号', ascending=False, inplace=True)
        df.to_excel(self.filepath, index=False, sheet_name=self.table_name)

    @timeit
    def run(self, start='2013-01-01', end='2021-04-13', use_pool: bool = True):
        """Crawl every result page in [start, end] and export to Excel.

        Args:
            start: range start date, 'YYYY-MM-DD'.
            end: range end date, 'YYYY-MM-DD'.
            use_pool: True fetches the remaining pages concurrently and
                exports via pandas; False fetches serially and exports via
                openpyxl.
        """
        print('start make request ……')
        response = self.spider(start=start, end=end)
        # parse() returns the decoded payload, so the JSON body is only
        # decoded once (the original re-called response.json() here).
        data = self.parse(response)
        page_count = data['pageCount']
        if use_pool:
            if page_count > 1:
                # max_workers must be > 0 (page_count can be 1 for a narrow
                # date range), and capping it avoids spawning hundreds of
                # threads for a long history — network I/O saturates far
                # earlier.
                workers = min(page_count - 1, 16)
                with futures.ThreadPoolExecutor(max_workers=workers) as pool:
                    futures_list = []
                    for page in range(2, page_count + 1):
                        future = pool.submit(self.spider, start, end, page)
                        future.add_done_callback(self.parse)
                        futures_list.append(future)
                    for future in futures.as_completed(futures_list):
                        if future.exception():
                            print(future.exception())
            self.save_to_excel_with_pandas()
        else:
            for page in range(2, page_count + 1):
                self.parse(self.spider(start=start, end=end, page_num=page))
            self.save_to_excel()


def main() -> None:
    """Entry point: crawl the full draw history concurrently and export."""
    CwlGov().run(use_pool=True)


if __name__ == '__main__':
    main()
