import pymysql
import time
import traceback
import requests
import json

from selenium.webdriver import Chrome, ChromeOptions
from bs4 import BeautifulSoup

from config import *


def get_tencent_data():
    """Fetch nationwide COVID-19 statistics from the Tencent news API.

    Returns:
        dict with two keys:
          'history': {'YYYY-MM-DD': {'confirm', 'suspect', 'heal', 'dead',
                      plus 'confirm_add'/'suspect_add'/'heal_add'/'dead_add'
                      daily increments when the API provides them}}
          'details': list of [update_time, province, city, confirm,
                     confirm_add, heal, dead] rows, one per city.
    """
    header = {'User-Agent':
                  r'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36 Edg/97.0.1072.62'}
    url = 'https://api.inews.qq.com/newsqa/v1/query/inner/publish/modules/list?modules=chinaDayList,chinaDayAddList,diseaseh5Shelf,provinceCompare,diseaseh5Shelf'
    # Timeout keeps the whole pipeline from hanging if the API stalls.
    res = requests.get(url, headers=header, timeout=30).json()

    data = res['data']

    def _to_iso(item):
        # The API splits the year ('y') from a 'MM.DD' date string;
        # normalize the pair to 'YYYY-MM-DD'.
        tup = time.strptime(item['y'] + '.' + item['date'], '%Y.%m.%d')
        return time.strftime('%Y-%m-%d', tup)

    history = {}
    for i in data['chinaDayList']:
        history[_to_iso(i)] = {'confirm': i['confirm'],
                               'suspect': i['suspect'],
                               'heal': i['heal'], 'dead': i['dead']}

    for i in data['chinaDayAddList']:
        ds = _to_iso(i)
        if ds not in history:
            # Increment rows without a matching cumulative row are dropped.
            continue
        history[ds].update({'confirm_add': i['confirm'],
                            'suspect_add': i['suspect'],
                            'heal_add': i['heal'], 'dead_add': i['dead']})

    details = []
    update_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
    # areaTree[0] is China; its children are the provinces.
    data_province = data['diseaseh5Shelf']['areaTree'][0]['children']
    for pro_infos in data_province:
        province = pro_infos['name']
        for city_infos in pro_infos['children']:
            details.append([update_time, province, city_infos['name'],
                            city_infos['total']['confirm'],
                            city_infos['today']['confirm'],
                            city_infos['total']['heal'],
                            city_infos['total']['dead']])
    return {'history': history, 'details': details}


def insert_history(data: dict):
    """Bulk-insert every day's record into the `history` table (first load).

    Args:
        data: mapping 'YYYY-MM-DD' -> stats dict carrying the 8 fields
              confirm/confirm_add/suspect/suspect_add/heal/heal_add/dead/dead_add.
    """
    cursor = None  # so `finally` is safe even if db.cursor() itself raises
    try:
        print(f'{time.asctime()} 开始插入数据')
        cursor = db.cursor()
        # Parameterized query: lets the driver do the quoting instead of
        # interpolating values into the SQL string by hand.
        sql = 'insert into history values(%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        for ds, v in data.items():
            cursor.execute(sql, (ds, v['confirm'], v['confirm_add'],
                                 v['suspect'], v['suspect_add'],
                                 v['heal'], v['heal_add'],
                                 v['dead'], v['dead_add']))
        db.commit()
        print(f'{time.asctime()} 完成插入数据')
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()


def update_history(data: dict):
    """Insert any complete day that is not yet stored in `history`.

    Args:
        data: mapping 'YYYY-MM-DD' -> stats dict; only entries carrying all
              8 fields (cumulative totals plus daily increments) are inserted.
    """
    cursor = None  # so `finally` is safe even if db.cursor() itself raises
    try:
        print(f'{time.asctime()} 开始更新历史数据')
        cursor = db.cursor()
        check_sql = 'select confirm from history where ds=%s'
        # Parameterized insert instead of hand-built SQL strings.
        insert_sql = 'insert into history values(%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        for ds, v in data.items():
            if len(v) != 8:
                # Day lacks the *_add increments -> incomplete, skip it.
                continue
            # execute() returns the matched row count; 0 means not stored yet.
            if not cursor.execute(check_sql, ds):
                cursor.execute(insert_sql,
                               (ds, v['confirm'], v['confirm_add'],
                                v['suspect'], v['suspect_add'],
                                v['heal'], v['heal_add'],
                                v['dead'], v['dead_add']))
        db.commit()
        print(f'{time.asctime()} 完成更新历史数据')
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()


def update_details(data: list):
    """Append a fresh snapshot to `details` unless it is already the latest.

    Args:
        data: rows of [update_time, province, city, confirm, confirm_add,
              heal, dead] as produced by get_tencent_data().
    """
    cursor = None
    try:
        if not data:
            # Nothing fetched; avoid indexing data[0] below.
            return
        cursor = db.cursor()
        # The subquery picks update_time of the newest stored row (highest id);
        # the outer select compares it to our timestamp: 1 if equal, else 0.
        check_sql = 'select %s=(select update_time from details order by id desc limit 1)'
        # Explicit column list pins the insert order.
        insert_sql = ('insert into details (update_time,province,city,confirm,confirm_add,'
                      'heal,dead) values(%s,%s,%s,%s,%s,%s,%s)')
        cursor.execute(check_sql, data[0][0])  # compare against the stored max timestamp
        already_current = cursor.fetchone()[0]
        if already_current:
            print(f'{time.asctime()} 已是最新数据')
        else:
            print(f'{time.asctime()} 开始更新数据')
            cursor.executemany(insert_sql, data)  # batch insert of all rows
            db.commit()
            print(f'{time.asctime()} 完成更新数据')
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()


def getBaiduData():
    """Scrape hot-search snippets from Baidu news with headless Chrome.

    Returns:
        list of element texts, one per matched hot-search entry.
    """
    option = ChromeOptions()
    option.add_argument('--headless')  # run Chrome without a visible window

    browser = Chrome(options=option)
    try:
        url = 'https://www.baidu.com/s?tn=news&rtt=1&bsst=1&wd=%E7%96%AB%E6%83%85%E6%96%B0%E9%97%BB&cl=2'
        browser.get(url)
        xpath = '//*[@id]/div/div/div/span[2]'
        elements = browser.find_elements(by='xpath', value=xpath)
        return [element.text for element in elements]
    finally:
        # quit() also terminates the chromedriver process; the original
        # close() outside a finally leaked the driver on any exception.
        browser.quit()

def updateHotSearch():
    """Scrape Baidu hot-search entries and insert them into `hotsearch`."""
    cursor = None
    try:
        content = getBaiduData()
        print(f'{time.asctime()} 开始更新热搜数据')
        cursor = db.cursor()
        sql = 'insert into hotsearch (id,dt,content) values(%s,%s,%s)'
        ts = time.strftime('%Y-%m-%d %X')
        # BUG FIX: args must follow the column list (id, dt, content);
        # the original passed (ts, text, count), storing the timestamp in id
        # and the counter in content.
        for idx, text in enumerate(content, start=1):
            cursor.execute(sql, (idx, ts, text))
        db.commit()
        print(f'{time.asctime()} 热搜数据更新完毕')
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()

def getNews():
    """Scrape epidemic news headlines from Baidu news.

    Returns:
        dict mapping article title -> article URL.
    """
    option = ChromeOptions()
    option.add_argument('--headless')  # run Chrome without a visible window

    browser = Chrome(options=option)
    try:
        url = 'https://www.baidu.com/s?tn=news&rtt=1&bsst=1&wd=%E7%96%AB%E6%83%85%E6%96%B0%E9%97%BB&cl=2'
        browser.get(url)
        xpath = '//*[@id]/div/h3/a'
        anchors = browser.find_elements(by='xpath', value=xpath)
        # Build {title: href} in one pass instead of two parallel lists.
        return {a.text: a.get_attribute('href') for a in anchors}
    finally:
        # quit() also terminates the chromedriver process; the original
        # close() outside a finally leaked the driver on any exception.
        browser.quit()

def updateNews():
    """Scrape news headlines and insert them into the `news` table."""
    cursor = None
    try:
        content = getNews()
        print(f'{time.asctime()} 开始更新新闻数据')
        cursor = db.cursor()
        sql = 'insert into news (id,title,href) values(%s,%s,%s)'
        # BUG FIX: args must follow the column list (id, title, href);
        # the original passed (title, href, count), storing the title in id
        # and the counter in href.
        for idx, (title, href) in enumerate(content.items(), start=1):
            cursor.execute(sql, (idx, title, href))
        db.commit()
        print(f'{time.asctime()} 新闻数据更新完毕')
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()

#获取虚假消息
def get_fakes():
    """Scrape rumor-debunking items from piyao.org.cn.

    Returns:
        dict mapping item title -> item URL.
    """
    url = "http://www.piyao.org.cn/2020yqpy/"  # China Internet rumor-refuting platform
    option = ChromeOptions()
    option.add_argument("--headless")  # run Chrome without a visible window

    browser = Chrome(options=option)
    try:
        browser.get(url)
        xpath = '/html/body/div[2]/div[1]/div/ul/li/a'
        fakes = browser.find_elements(by='xpath', value=xpath)
        # Build {title: href} in one pass instead of two parallel lists.
        return {a.text: a.get_attribute('href') for a in fakes}
    finally:
        # quit() also terminates the chromedriver process; the original
        # close() outside a finally leaked the driver on any exception.
        browser.quit()

#更新虚假消息
def update_fakes():
    """Scrape rumor-debunking items and insert them into `fakes`."""
    cursor = None
    try:
        context = get_fakes()
        print(f"{time.asctime()}开始更新谣言数据")
        cursor = db.cursor()
        sql = "insert into fakes(title,href,id) values(%s,%s,%s)"  # first run populates the table
        # enumerate replaces the manual counter; ids start at 1.
        for idx, (title, href) in enumerate(context.items(), start=1):
            cursor.execute(sql, (title, href, idx))
        db.commit()
        print(f"{time.asctime()}谣言数据更新完毕")
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()

#获取海外消息
def get_oversea_news():
    """Scrape overseas epidemic news from ifeng.com.

    Returns:
        dict mapping article title -> article URL.
    """
    url = "https://news.ifeng.com/c/special/7uLj4F83Cqm"
    option = ChromeOptions()
    option.add_argument("--headless")  # run Chrome without a visible window

    browser = Chrome(options=option)
    try:
        browser.get(url)
        xpath = '//*[@id="root"]/div/div[10]/div[1]/div/div/div/div[1]/a'
        news = browser.find_elements(by='xpath', value=xpath)
        # Build {title: href} in one pass instead of two parallel lists.
        return {a.text: a.get_attribute('href') for a in news}
    finally:
        # quit() also terminates the chromedriver process; the original
        # close() outside a finally leaked the driver on any exception.
        browser.quit()

def update_oversea():
    """Scrape overseas news items and insert them into `oversea`."""
    cursor = None  # (dropped the unused `conn = None` local)
    try:
        context = get_oversea_news()
        print(f"{time.asctime()}开始更新Oversea数据")
        cursor = db.cursor()
        sql = "insert into oversea(title,href,id) values(%s,%s,%s)"  # first run populates the table
        # enumerate replaces the manual counter; ids start at 1.
        for idx, (title, href) in enumerate(context.items(), start=1):
            cursor.execute(sql, (title, href, idx))
        db.commit()
        print(f"{time.asctime()}Oversea数据更新完毕")
    except Exception:
        traceback.print_exc()
    finally:
        if cursor:
            cursor.close()

# Single shared connection used by every update helper above (global `db`).
db = pymysql.connect(host=HOST, user=USER, passwd=PASSWORD, database=DATABASE)
try:
    data = get_tencent_data()
    update_history(data['history'])
    update_details(data['details'])
    updateHotSearch()
    updateNews()
    update_fakes()
    update_oversea()
finally:
    # Always release the connection, even if one of the steps raised.
    db.close()

