import random
import requests
from bs4 import BeautifulSoup
from datetime import datetime
import re
import json
import pymysql


class DatabaseAccess():
    """Thin persistence layer for the `db_sports`.`sina_news` MySQL table.

    Every public method opens a fresh pymysql connection, does its work,
    commits and closes — no connection is held between calls.
    """

    # Initialize connection attributes
    def __init__(self):
        self.__db_host = "127.0.0.1"
        self.__db_port = 3306
        self.__db_user = "账户"
        self.__db_password = "密码"
        self.__db_database = "db_sports"

    # Open a new database connection and store it on the instance
    def isConnectionOpen(self):
        self.__db = pymysql.connect(
            host=self.__db_host,
            port=self.__db_port,
            user=self.__db_user,
            password=self.__db_password,
            database=self.__db_database,
            charset='utf8'
        )

    # Replace the table contents with the given news items
    def linesinsert(self, arr):
        """Truncate `sina_news` and bulk-insert *arr*.

        Each element of *arr* is a dict with 'title', 'link' and a 'date'
        list whose first three entries are year/month/day strings; the
        date is stored as "Y-m-d".
        """
        rows = [
            [item['title'], item['link'],
             item['date'][0] + "-" + item['date'][1] + "-" + item['date'][2]]
            for item in arr
        ]
        # Local state (not `global cursor`): a failed connect must not make
        # the finally-block touch objects that were never created.
        cursor = None
        connected = False
        try:
            # Connect to the database
            self.isConnectionOpen()
            connected = True
            cursor = self.__db.cursor()
            sql = ("INSERT INTO `db_sports`.`sina_news` "
                   "(`title`, `link`, `date`) VALUES (%s,%s,%s)")
            # Clear the table first so the insert fully replaces its contents
            cursor.execute("TRUNCATE TABLE sina_news")
            cursor.executemany(sql, rows)
        except Exception as e:
            # Best-effort: report and fall through to cleanup
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if connected:
                self.__db.commit()
                self.__db.close()

    # Delete every row from the table
    def removeAll(self):
        cursor = None
        connected = False
        try:
            # Connect to the database
            self.isConnectionOpen()
            connected = True
            cursor = self.__db.cursor()
            cursor.execute("TRUNCATE TABLE sina_news")
        except Exception as e:
            # Best-effort: report and fall through to cleanup
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if connected:
                self.__db.commit()
                self.__db.close()

    # Clear the table, then store the given news list
    def data_update(self, arr):
        self.removeAll()
        self.linesinsert(arr)

class News():
    """Scrapers for today's sports headlines from sina.com.cn and cctv.com."""

    def _scrape_sina(self, selector):
        """Fetch the Sina sports front page and return today's news links.

        *selector* is the CSS selector for the anchor tags to inspect.
        Returns a list of dicts with 'title', 'link' and 'date' (the runs
        of digits found in the URL, whose first three are year/month/day).
        """
        news = []
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36"}
        hot_url = 'https://sports.sina.com.cn/'
        # BUG FIX: the header dict must be passed as `headers=`; passed
        # positionally it becomes requests.get's `params` argument and the
        # UA header is never sent.
        r = requests.get(hot_url, headers=header)
        r.encoding = "utf-8"
        soup = BeautifulSoup(r.text, 'lxml')
        today = datetime.now().strftime("%Y%m%d")
        for item in soup.select(selector):
            href = item.get('href')
            if href is None:
                # Bare <a> tags without href would crash re.findall
                continue
            digits = re.findall(r'\d+', href)
            title = item.get_text()
            # Keep single-line titles whose URL date (first three digit
            # runs, Y/m/d) is today's date.
            if (len(digits) == 4 and title.find('\n') < 0 and
                    digits[0] + digits[1] + digits[2] == today):
                news.append({"title": title, "link": href, "date": digits})
        return news

    def getHotSport(self):
        """Return today's items from the Sina 'hot' headline box."""
        return self._scrape_sina('#ty-top-ent0 > div > h3 > a')

    def getPutongNews(self):
        """Return today's items from all anchors on the Sina front page."""
        return self._scrape_sina('a')

    def getCCTVNews(self):
        """Fetch the CCTV sports 'hot' list from its JSONP endpoint."""
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36"}
        hot_url = 'https://sports.cctv.com/2019/07/gaiban/cmsdatainterface/page/remen_1.jsonp?cb=remen'
        r = requests.get(hot_url, headers=header)
        r.encoding = "utf-8"
        # The response is JSONP: remen({...}). Strip the callback name and
        # the surrounding parentheses, then parse the JSON payload once
        # (the original round-tripped it through json.dumps + two loads,
        # which is equivalent).
        payload = r.text.split('remen')[1]
        payload = payload[1:len(payload) - 1]
        return json.loads(payload)['data']['list']


if __name__ == "__main__":  # script entry point
    # Scrape both Sina feeds and replace the table contents with the result.
    scraper = News()
    combined = scraper.getHotSport() + scraper.getPutongNews()
    DatabaseAccess().data_update(combined)
