import requests
import re  # python中的正则表达式（re模块）
import time
import pymysql as MySQLdb


class SsqSpider:
    """Scraper for historical shuangseqiu (double-color-ball) lottery draws
    published on kaijiang.zhcw.com.

    Yields one tuple per draw:
    (draw_date, issue_number, "r1,r2,r3,r4,r5,r6", blue_ball)
    and can optionally persist each tuple into a local MySQL table ``ssq``.
    """

    def __init__(self):
        # Request headers that mimic a real browser so the site serves the
        # normal listing pages instead of blocking the crawler.
        self.headers = {
            "Host": "kaijiang.zhcw.com",
            "Referer": "http://kaijiang.zhcw.com/zhcw/html/ssq/list_6.html",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36",
        }

    def _get_html(self, url):
        """
        Fetch a page and return its decoded source.

        :param url: page URL
        :return: page source as text (decoded as UTF-8)
        """
        # timeout keeps the spider from hanging indefinitely on a dead
        # connection (the original call had no timeout at all).
        response = requests.get(url=url, headers=self.headers, timeout=10)
        response.encoding = 'utf-8'
        return response.text

    def _get_total_page(self, html):
        """
        Extract the total number of listing pages from the pager markup.

        :param html: source of any listing page
        :return: last page number as int, or None if it cannot be parsed
        """
        total_page = None
        try:
            total_page = int(re.findall(r"class=\"pg\".*?<strong>(.*?)</strong>", html)[0])
        except (IndexError, ValueError):
            # IndexError: pager markup not found; ValueError: captured
            # text is not a number.  Anything else should propagate.
            print("未能正常获取到总页数")
        return total_page

    def get_total_page_size(self):
        """
        Fetch the first listing page and extract the total record count
        (the second <strong> inside the pager).

        :return: total number of draw records as int, or None on failure
        """
        html = self._get_html('http://kaijiang.zhcw.com/zhcw/html/ssq/list_1.html')
        total_page_size = None
        try:
            total_page_size = int(re.findall(
                r"class=\"pg\".*?<strong>.*?</strong>.*?<strong>(.*?)</strong>", html)[0])
        except (IndexError, ValueError):
            print("未能正常获取到总记录数")
        return total_page_size

    def _parse_draws(self, html):
        """
        Parse every draw row out of one listing page.

        :param html: source of a listing page
        :return: list of (date, issue_no, "r1,r2,r3,r4,r5,r6", blue) tuples
        """
        # Each captured tuple looks like:
        # ('2020-10-27', '2020106', '02', '04', '11', '15', '18', '28', '10')
        # (draw date, issue number, red balls 1-6, blue ball)
        rule = r"<tr>.*?<td align=\"center\">(.*?)</td>.*?<td align=\"center\">(.*?)</td>.*?<td align=\"center\" style=\"padding-left:10px;\">.*?<em class=\"rr\">(.*?)</em>.*?<em class=\"rr\">(.*?)</em>.*?<em class=\"rr\">(.*?)</em>.*?<em class=\"rr\">(.*?)</em>.*?<em class=\"rr\">(.*?)</em>.*?<em class=\"rr\">(.*?)</em>.*?<em>(.*?)</em></td>"
        num_list = re.findall(rule, html, re.S | re.M)
        return [
            (num[0], num[1],
             "{},{},{},{},{},{}".format(num[2], num[3], num[4], num[5], num[6], num[7]),
             num[8])
            for num in num_list
        ]

    def _get_ssq_num_generator(self, total_page):
        """
        Lazily yield every draw tuple from page 1 through ``total_page``.

        :param total_page: highest page number to fetch
        :return: generator of (date, issue_no, red_balls_csv, blue) tuples
        """
        for page_num in range(1, total_page + 1):
            url = "http://kaijiang.zhcw.com/zhcw/html/ssq/list_" + str(page_num) + ".html"
            html = self._get_html(url)
            time.sleep(2)  # be polite to the server between page fetches
            for result in self._parse_draws(html):
                yield result

    def _db_insert_ssq(self, ssq_tuple):
        """
        Insert one draw record into MySQL, skipping dates already stored.

        :param ssq_tuple: (public_date, public_num, red_ball, blue_ball)
        """
        conn = MySQLdb.connect(
            host='localhost',
            port=3306,
            user='root',
            passwd='root',
            db='ssq',
            charset='utf8'
        )
        cur = conn.cursor()
        try:
            # Parameterized existence check: one indexed lookup instead of
            # fetching the whole table and scanning it in Python.
            cur.execute("SELECT 1 FROM ssq WHERE public_date = %s LIMIT 1", (ssq_tuple[0],))
            if cur.fetchone():
                print('{}的数据已存在'.format(ssq_tuple[0]))
            else:
                sql_insert = "insert into ssq(public_date,public_num,red_ball,blue_ball) VALUES (%s,%s,%s,%s)"
                cur.execute(sql_insert, ssq_tuple)
                conn.commit()
                print("数据插入成功:{}".format(ssq_tuple))
        except Exception as e:
            # Best-effort persistence: report DB errors without aborting the
            # crawl (matches the original behavior).
            print(e)
        finally:
            cur.close()
            conn.close()

    def run(self):
        """
        Drive the crawl: determine the page count from page 1, then yield
        every draw tuple found across all pages.
        """
        url = "http://kaijiang.zhcw.com/zhcw/html/ssq/list_1.html"
        html = self._get_html(url)
        total_page = self._get_total_page(html)
        for ssq_tuple in self._get_ssq_num_generator(total_page):
            yield ssq_tuple


if __name__ == '__main__':
    # Smoke-test the spider: fetch and show only the most recent draw.
    spider = SsqSpider()
    first_draw = next(spider.run(), None)
    if first_draw is not None:
        print(first_draw)